From 8ebf43875b2306a1ccfc55a56dd37568a03ad6dc Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Sat, 2 Jun 2018 14:36:07 +0100 Subject: [PATCH 01/41] feat: modularise block, add mocha and test suite utils License: MIT Signed-off-by: Alan Shaw --- js/src/block.js | 161 ------------------------------------------ js/src/block/get.js | 63 +++++++++++++++++ js/src/block/index.js | 10 +++ js/src/block/put.js | 104 +++++++++++++++++++++++++++ js/src/block/stat.js | 50 +++++++++++++ js/src/utils/mocha.js | 10 +++ js/src/utils/suite.js | 14 ++++ 7 files changed, 251 insertions(+), 161 deletions(-) delete mode 100644 js/src/block.js create mode 100644 js/src/block/get.js create mode 100644 js/src/block/index.js create mode 100644 js/src/block/put.js create mode 100644 js/src/block/stat.js create mode 100644 js/src/utils/mocha.js create mode 100644 js/src/utils/suite.js diff --git a/js/src/block.js b/js/src/block.js deleted file mode 100644 index 38219db5b..000000000 --- a/js/src/block.js +++ /dev/null @@ -1,161 +0,0 @@ -/* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - -'use strict' - -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) -const Block = require('ipfs-block') -const multihash = require('multihashes') -const CID = require('cids') -const Buffer = require('safe-buffer').Buffer - -function expectKey (block, expected, callback) { - expect(block.cid.multihash).to.eql(expected) - callback() -} - -module.exports = (common) => { - describe('.block', () => { - let ipfs - - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) - }) - - after((done) => common.teardown(done)) - - describe('.put', () => { - it('a buffer, 
using defaults', (done) => { - const expectedHash = 'QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ' - const blob = new Buffer('blorb') - - ipfs.block.put(blob, (err, block) => { - expect(err).to.not.exist() - expect(block.data).to.be.eql(blob) - expectKey(block, multihash.fromB58String(expectedHash), done) - }) - }) - - it('a buffer, using CID', (done) => { - const expectedHash = 'QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ' - const cid = new CID(expectedHash) - const blob = new Buffer('blorb') - - ipfs.block.put(blob, { cid: cid }, (err, block) => { - expect(err).to.not.exist() - expect(block.data).to.be.eql(blob) - expectKey(block, multihash.fromB58String(expectedHash), done) - }) - }) - - it('a buffer, using options', (done) => { - const expectedHash = 'QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ' - const blob = new Buffer('blorb') - - ipfs.block.put(blob, { - format: 'dag-pb', - mhtype: 'sha2-256', - version: 0 - }, (err, block) => { - expect(err).to.not.exist() - expect(block.data).to.be.eql(blob) - expectKey(block, multihash.fromB58String(expectedHash), done) - }) - }) - - it('a Block instance', (done) => { - const expectedHash = 'QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ' - const cid = new CID(expectedHash) - const b = new Block(new Buffer('blorb'), cid) - - ipfs.block.put(b, (err, block) => { - expect(err).to.not.exist() - expect(block.data).to.eql(new Buffer('blorb')) - expectKey(block, multihash.fromB58String(expectedHash), done) - }) - }) - - it('error with array of blocks', (done) => { - const blob = Buffer('blorb') - - ipfs.block.put([blob, blob], (err) => { - expect(err).to.be.an.instanceof(Error) - done() - }) - }) - - // TODO it.skip('Promises support', (done) => {}) - }) - - describe('.get', () => { - it('by CID object', (done) => { - const hash = 'QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ' - const cid = new CID(hash) - - ipfs.block.get(cid, (err, block) => { - expect(err).to.not.exist() - expect(block.data).to.eql(new 
Buffer('blorb')) - expectKey(block, cid.multihash, done) - }) - }) - - it('by CID in Str', (done) => { - const hash = 'QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ' - - ipfs.block.get(hash, (err, block) => { - expect(err).to.not.exist() - expect(block.data).to.eql(new Buffer('blorb')) - expectKey(block, multihash.fromB58String(hash), done) - }) - }) - - it('should get an empty block', (done) => { - ipfs.block.put(Buffer.alloc(0), { - format: 'dag-pb', - mhtype: 'sha2-256', - version: 0 - }, (err, block) => { - expect(err).to.not.exist() - - ipfs.block.get(block.cid, (err, block) => { - expect(err).to.not.exist() - expect(block.data).to.eql(Buffer.alloc(0)) - done() - }) - }) - }) - - // TODO it.skip('Promises support', (done) => {}) - }) - - describe('.stat', () => { - it('by CID', (done) => { - const hash = 'QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ' - const cid = new CID(hash) - - ipfs.block.stat(cid, (err, stats) => { - expect(err).to.not.exist() - expect(stats).to.have.property('key') - expect(stats).to.have.property('size') - done() - }) - }) - - // TODO it.skip('Promises support', (done) => {}) - }) - }) -} diff --git a/js/src/block/get.js b/js/src/block/get.js new file mode 100644 index 000000000..da2e0ccad --- /dev/null +++ b/js/src/block/get.js @@ -0,0 +1,63 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const multihash = require('multihashes') +const CID = require('cids') +const Buffer = require('safe-buffer').Buffer +const { getDescribe } = require('../utils/mocha') + +module.exports = (common, options) => { + const describe = getDescribe(options) + + describe('.block.get', function () { + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, 
factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should get by CID object', (done) => { + const hash = 'QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ' + const cid = new CID(hash) + + ipfs.block.get(cid, (err, block) => { + expect(err).to.not.exist() + expect(block.data).to.eql(new Buffer('blorb')) + expect(block.cid.multihash).to.eql(cid.multihash) + done() + }) + }) + + it('should get by CID in string', (done) => { + const hash = 'QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ' + + ipfs.block.get(hash, (err, block) => { + expect(err).to.not.exist() + expect(block.data).to.eql(new Buffer('blorb')) + expect(block.cid.multihash).to.eql(multihash.fromB58String(hash)) + done() + }) + }) + + // TODO it.skip('Promises support', (done) => {}) + }) +} diff --git a/js/src/block/index.js b/js/src/block/index.js new file mode 100644 index 000000000..df97d4c72 --- /dev/null +++ b/js/src/block/index.js @@ -0,0 +1,10 @@ +'use strict' +const { createSuite } = require('../utils/suite') + +const tests = { + put: require('./put'), + get: require('./get'), + stat: require('./stat') +} + +module.exports = createSuite(tests) diff --git a/js/src/block/put.js b/js/src/block/put.js new file mode 100644 index 000000000..752cb7bc4 --- /dev/null +++ b/js/src/block/put.js @@ -0,0 +1,104 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const Block = require('ipfs-block') +const multihash = require('multihashes') +const CID = require('cids') +const Buffer = require('safe-buffer').Buffer +const { getDescribe } = require('../utils/mocha') + +module.exports = (common, options) => { + const describe = getDescribe(options) + + describe('.block.put', () => { + let ipfs + + before(function 
(done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should put a buffer, using defaults', (done) => { + const expectedHash = 'QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ' + const blob = new Buffer('blorb') + + ipfs.block.put(blob, (err, block) => { + expect(err).to.not.exist() + expect(block.data).to.be.eql(blob) + expect(block.cid.multihash).to.eql(multihash.fromB58String(expectedHash)) + done() + }) + }) + + it('should put a buffer, using CID', (done) => { + const expectedHash = 'QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ' + const cid = new CID(expectedHash) + const blob = new Buffer('blorb') + + ipfs.block.put(blob, { cid: cid }, (err, block) => { + expect(err).to.not.exist() + expect(block.data).to.be.eql(blob) + expect(block.cid.multihash).to.eql(multihash.fromB58String(expectedHash)) + done() + }) + }) + + it('should put a buffer, using options', (done) => { + const expectedHash = 'QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ' + const blob = new Buffer('blorb') + + ipfs.block.put(blob, { + format: 'dag-pb', + mhtype: 'sha2-256', + version: 0 + }, (err, block) => { + expect(err).to.not.exist() + expect(block.data).to.be.eql(blob) + expect(block.cid.multihash).to.eql(multihash.fromB58String(expectedHash)) + done() + }) + }) + + it('should put a Block instance', (done) => { + const expectedHash = 'QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ' + const cid = new CID(expectedHash) + const b = new Block(new Buffer('blorb'), cid) + + ipfs.block.put(b, (err, block) => { + expect(err).to.not.exist() + expect(block.data).to.eql(new Buffer('blorb')) + expect(block.cid.multihash).to.eql(multihash.fromB58String(expectedHash)) + done() + }) + }) + + 
it('should error with array of blocks', (done) => { + const blob = Buffer('blorb') + + ipfs.block.put([blob, blob], (err) => { + expect(err).to.be.an.instanceof(Error) + done() + }) + }) + + // TODO it.skip('Promises support', (done) => {}) + }) +} diff --git a/js/src/block/stat.js b/js/src/block/stat.js new file mode 100644 index 000000000..3df16b438 --- /dev/null +++ b/js/src/block/stat.js @@ -0,0 +1,50 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const CID = require('cids') +const { getDescribe } = require('../utils/mocha') + +module.exports = (common, options) => { + const describe = getDescribe(options) + + describe('.block.stat', () => { + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should stat by CID', (done) => { + const hash = 'QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ' + const cid = new CID(hash) + + ipfs.block.stat(cid, (err, stats) => { + expect(err).to.not.exist() + expect(stats).to.have.property('key') + expect(stats).to.have.property('size') + done() + }) + }) + + // TODO it.skip('Promises support', (done) => {}) + }) +} diff --git a/js/src/utils/mocha.js b/js/src/utils/mocha.js new file mode 100644 index 000000000..af3981ace --- /dev/null +++ b/js/src/utils/mocha.js @@ -0,0 +1,10 @@ +/* eslint-env mocha */ + +// Get a describe function that is optionally 'skipped' or 'onlyed' +function getDescribe (config) { + if (config && config.skip) return describe.skip + if (config && config.only) return describe.only + return describe +} + 
+module.exports.getDescribe = getDescribe diff --git a/js/src/utils/suite.js b/js/src/utils/suite.js new file mode 100644 index 000000000..67c10bad4 --- /dev/null +++ b/js/src/utils/suite.js @@ -0,0 +1,14 @@ +function createSuite (tests) { + const suite = (common, options) => { + Object.keys(tests).forEach(t => { + const opts = Object.assign({}, options) + opts.skip = Array.isArray(opts.skip) ? opts.skip.includes(t) : opts.skip + opts.only = Array.isArray(opts.only) ? opts.only.includes(t) : opts.only + tests[t](common, opts) + }) + } + + return Object.assign(suite, tests) +} + +module.exports.createSuite = createSuite From 1cc38ef7fb8aacfd48c4df008b5f7878378bec4c Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Mon, 4 Jun 2018 15:55:11 +0100 Subject: [PATCH 02/41] fix: isolated block tests License: MIT Signed-off-by: Alan Shaw --- js/src/block/get.js | 30 ++++++++++++++++-------------- js/src/block/put.js | 13 +++++++------ js/src/block/stat.js | 24 ++++++++++++++---------- js/src/utils/suite.js | 4 ++-- 4 files changed, 39 insertions(+), 32 deletions(-) diff --git a/js/src/block/get.js b/js/src/block/get.js index da2e0ccad..d25c93123 100644 --- a/js/src/block/get.js +++ b/js/src/block/get.js @@ -10,33 +10,37 @@ chai.use(dirtyChai) const multihash = require('multihashes') const CID = require('cids') const Buffer = require('safe-buffer').Buffer +const auto = require('async/auto') const { getDescribe } = require('../utils/mocha') -module.exports = (common, options) => { +module.exports = (createCommon, options) => { const describe = getDescribe(options) + const common = createCommon() describe('.block.get', function () { - let ipfs + const data = Buffer.from('blorb') + let ipfs, hash before(function (done) { // CI takes longer to instantiate the daemon, so we need to increase the // timeout for the before step this.timeout(60 * 1000) - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs 
= node - done() - }) + auto({ + factory: (cb) => common.setup(cb), + ipfs: ['factory', (res, cb) => res.factory.spawnNode(cb)], + block: ['ipfs', (res, cb) => res.ipfs.block.put(data, cb)] + }, (err, res) => { + if (err) return done(err) + ipfs = res.ipfs + hash = res.block.cid.multihash + done() }) }) after((done) => common.teardown(done)) it('should get by CID object', (done) => { - const hash = 'QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ' const cid = new CID(hash) ipfs.block.get(cid, (err, block) => { @@ -48,12 +52,10 @@ module.exports = (common, options) => { }) it('should get by CID in string', (done) => { - const hash = 'QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ' - - ipfs.block.get(hash, (err, block) => { + ipfs.block.get(multihash.toB58String(hash), (err, block) => { expect(err).to.not.exist() expect(block.data).to.eql(new Buffer('blorb')) - expect(block.cid.multihash).to.eql(multihash.fromB58String(hash)) + expect(block.cid.multihash).to.eql(hash) done() }) }) diff --git a/js/src/block/put.js b/js/src/block/put.js index 752cb7bc4..10474d4e6 100644 --- a/js/src/block/put.js +++ b/js/src/block/put.js @@ -13,8 +13,9 @@ const CID = require('cids') const Buffer = require('safe-buffer').Buffer const { getDescribe } = require('../utils/mocha') -module.exports = (common, options) => { +module.exports = (createCommon, options) => { const describe = getDescribe(options) + const common = createCommon() describe('.block.put', () => { let ipfs @@ -38,7 +39,7 @@ module.exports = (common, options) => { it('should put a buffer, using defaults', (done) => { const expectedHash = 'QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ' - const blob = new Buffer('blorb') + const blob = Buffer.from('blorb') ipfs.block.put(blob, (err, block) => { expect(err).to.not.exist() @@ -51,7 +52,7 @@ module.exports = (common, options) => { it('should put a buffer, using CID', (done) => { const expectedHash = 'QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ' const cid = new 
CID(expectedHash) - const blob = new Buffer('blorb') + const blob = Buffer.from('blorb') ipfs.block.put(blob, { cid: cid }, (err, block) => { expect(err).to.not.exist() @@ -63,7 +64,7 @@ module.exports = (common, options) => { it('should put a buffer, using options', (done) => { const expectedHash = 'QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ' - const blob = new Buffer('blorb') + const blob = Buffer.from('blorb') ipfs.block.put(blob, { format: 'dag-pb', @@ -80,7 +81,7 @@ module.exports = (common, options) => { it('should put a Block instance', (done) => { const expectedHash = 'QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ' const cid = new CID(expectedHash) - const b = new Block(new Buffer('blorb'), cid) + const b = new Block(Buffer.from('blorb'), cid) ipfs.block.put(b, (err, block) => { expect(err).to.not.exist() @@ -91,7 +92,7 @@ module.exports = (common, options) => { }) it('should error with array of blocks', (done) => { - const blob = Buffer('blorb') + const blob = Buffer.from('blorb') ipfs.block.put([blob, blob], (err) => { expect(err).to.be.an.instanceof(Error) diff --git a/js/src/block/stat.js b/js/src/block/stat.js index 3df16b438..6fe605149 100644 --- a/js/src/block/stat.js +++ b/js/src/block/stat.js @@ -8,33 +8,37 @@ const dirtyChai = require('dirty-chai') const expect = chai.expect chai.use(dirtyChai) const CID = require('cids') +const auto = require('async/auto') const { getDescribe } = require('../utils/mocha') -module.exports = (common, options) => { +module.exports = (createCommon, options) => { const describe = getDescribe(options) + const common = createCommon() describe('.block.stat', () => { - let ipfs + const data = Buffer.from('blorb') + let ipfs, hash before(function (done) { // CI takes longer to instantiate the daemon, so we need to increase the // timeout for the before step this.timeout(60 * 1000) - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = 
node - done() - }) + auto({ + factory: (cb) => common.setup(cb), + ipfs: ['factory', (res, cb) => res.factory.spawnNode(cb)], + block: ['ipfs', (res, cb) => res.ipfs.block.put(data, cb)] + }, (err, res) => { + if (err) return done(err) + ipfs = res.ipfs + hash = res.block.cid.multihash + done() }) }) after((done) => common.teardown(done)) it('should stat by CID', (done) => { - const hash = 'QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ' const cid = new CID(hash) ipfs.block.stat(cid, (err, stats) => { diff --git a/js/src/utils/suite.js b/js/src/utils/suite.js index 67c10bad4..0ad2cdbab 100644 --- a/js/src/utils/suite.js +++ b/js/src/utils/suite.js @@ -1,10 +1,10 @@ function createSuite (tests) { - const suite = (common, options) => { + const suite = (createCommon, options) => { Object.keys(tests).forEach(t => { const opts = Object.assign({}, options) opts.skip = Array.isArray(opts.skip) ? opts.skip.includes(t) : opts.skip opts.only = Array.isArray(opts.only) ? opts.only.includes(t) : opts.only - tests[t](common, opts) + tests[t](createCommon, opts) }) } From 269df8caa98c7b20fbde59ef8ea4a4c38b5c73b9 Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Mon, 4 Jun 2018 15:55:46 +0100 Subject: [PATCH 03/41] docs: updates code examples License: MIT Signed-off-by: Alan Shaw --- README.md | 60 +++++++++++++++++++++++++++++++++++++++++++++---------- 1 file changed, 50 insertions(+), 10 deletions(-) diff --git a/README.md b/README.md index 316fa4442..419bb2dae 100644 --- a/README.md +++ b/README.md @@ -52,6 +52,7 @@ Include this badge in your readme if you make a new module that implements inter ## Install In JavaScript land: + ```js npm install interface-ipfs-core ``` @@ -68,20 +69,59 @@ In Go land: Install `interface-ipfs-core` as one of the dependencies of your project and as a test file. 
Then, using `mocha` (for Node.js) or a test runner with compatible API, do: -``` -var test = require('interface-ipfs-core') - -var common = { - setup: function (cb) { - cb(null, IPFSFactory) +```js +const tests = require('interface-ipfs-core') + +// Create common setup and teardown +const createCommon = () => ({ + // Do some setup common to all tests + setup (cb) { + // Must call back with an "IPFS factory", an object with a `spawnNode` method + cb(null, { + // Use ipfsd-ctl or other to spawn an IPFS node for testing + spawnNode (cb) { /* ... */ } + }) }, - teardown: function (cb) { + // Dispose of nodes created by the IPFS factory and any other teardown + teardown (cb) { cb() } -} +}) + +tests.block(createCommon) +tests.config(createCommon) +tests.dag(createCommon) +// ...etc. (see js/src/index.js) +``` + +#### Running tests by command + +```js +tests.repo.version(createCommon) +``` + +#### Skipping tests + +```js +tests.repo.version(createCommon) +tests.repo.stat(createCommon) +tests.repo.gc(createCommon, { skip: true }) // pass an options object to skip these tests + +// OR, at the subsystem level + +tests.repo(createCommon, { skip: ['gc'] }) +``` + +#### Running only some tests + +```js +tests.repo.version(createCommon) +tests.repo.stat(createCommon) +tests.repo.gc(createCommon, { only: true }) // pass an options object to run only these tests + +// OR, at the subsystem level -// use all of the test suits -test.all(common) +tests.repo(createCommon, { only: ['gc'] }) ``` ### Go From 949b64c0b892cc5fda5575764c36dde42ef4d793 Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Mon, 4 Jun 2018 21:38:10 +0100 Subject: [PATCH 04/41] feat(bootstrap): modularised bootstrap tests License: MIT Signed-off-by: Alan Shaw --- js/src/bootstrap.js | 116 -------------------------------------- js/src/bootstrap/add.js | 68 ++++++++++++++++++++++ js/src/bootstrap/index.js | 10 ++++ js/src/bootstrap/list.js | 46 +++++++++++++++ js/src/bootstrap/rm.js | 75 ++++++++++++++++++++++++ 5 files 
changed, 199 insertions(+), 116 deletions(-) delete mode 100644 js/src/bootstrap.js create mode 100644 js/src/bootstrap/add.js create mode 100644 js/src/bootstrap/index.js create mode 100644 js/src/bootstrap/list.js create mode 100644 js/src/bootstrap/rm.js diff --git a/js/src/bootstrap.js b/js/src/bootstrap.js deleted file mode 100644 index cb81ddbf2..000000000 --- a/js/src/bootstrap.js +++ /dev/null @@ -1,116 +0,0 @@ -/* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ -'use strict' - -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) - -const invalidArg = 'this/Is/So/Invalid/' -const validIp4 = '/ip4/104.236.176.52/tcp/4001/ipfs/QmSoLnSGccFuZQJzRadHn95W2CrSFmZuTdDWP8HXaHca9z' - -module.exports = (common) => { - describe('.bootstrap', function () { - this.timeout(100 * 1000) - - let ipfs - let peers - - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) - }) - - after((done) => common.teardown(done)) - - describe('.add', () => { - it('returns an error when called with an invalid arg', (done) => { - ipfs.bootstrap.add(invalidArg, (err) => { - expect(err).to.be.an.instanceof(Error) - done() - }) - }) - - it('returns a list of containing the bootstrap peer when called with a valid arg (ip4)', (done) => { - ipfs.bootstrap.add(validIp4, (err, res) => { - expect(err).to.not.exist() - expect(res).to.be.eql({ Peers: [validIp4] }) - peers = res.Peers - expect(peers).to.exist() - expect(peers.length).to.eql(1) - done() - }) - }) - - it('returns a list of bootstrap peers when called with the default option', (done) => { - ipfs.bootstrap.add(null, { default: true }, (err, res) => { - expect(err).to.not.exist() - peers = 
res.Peers - expect(peers).to.exist() - expect(peers.length).to.above(1) - done() - }) - }) - }) - - describe('.list', () => { - it('returns a list of peers', (done) => { - ipfs.bootstrap.list((err, res) => { - expect(err).to.not.exist() - peers = res.Peers - expect(peers).to.exist() - done() - }) - }) - }) - - describe('.rm', () => { - it('returns an error when called with an invalid arg', (done) => { - ipfs.bootstrap.rm(invalidArg, (err) => { - expect(err).to.be.an.instanceof(Error) - done() - }) - }) - - it('returns empty list because no peers removed when called without an arg or options', (done) => { - ipfs.bootstrap.rm(null, (err, res) => { - expect(err).to.not.exist() - peers = res.Peers - expect(peers).to.exist() - expect(peers.length).to.eql(0) - done() - }) - }) - - it('returns list containing the peer removed when called with a valid arg (ip4)', (done) => { - ipfs.bootstrap.rm(null, (err, res) => { - expect(err).to.not.exist() - peers = res.Peers - expect(peers).to.exist() - expect(peers.length).to.eql(0) - done() - }) - }) - - it('returns list of all peers removed when all option is passed', (done) => { - ipfs.bootstrap.rm(null, { all: true }, (err, res) => { - expect(err).to.not.exist() - peers = res.Peers - expect(peers).to.exist() - done() - }) - }) - }) - }) -} diff --git a/js/src/bootstrap/add.js b/js/src/bootstrap/add.js new file mode 100644 index 000000000..b060a07aa --- /dev/null +++ b/js/src/bootstrap/add.js @@ -0,0 +1,68 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const { getDescribe } = require('../utils/mocha') + +const invalidArg = 'this/Is/So/Invalid/' +const validIp4 = '/ip4/104.236.176.52/tcp/4001/ipfs/QmSoLnSGccFuZQJzRadHn95W2CrSFmZuTdDWP8HXaHca9z' + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const common = createCommon() + + 
describe('.bootstrap.add', function () { + this.timeout(100 * 1000) + + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should return an error when called with an invalid arg', (done) => { + ipfs.bootstrap.add(invalidArg, (err) => { + expect(err).to.be.an.instanceof(Error) + done() + }) + }) + + it('should return a list containing the bootstrap peer when called with a valid arg (ip4)', (done) => { + ipfs.bootstrap.add(validIp4, (err, res) => { + expect(err).to.not.exist() + expect(res).to.be.eql({ Peers: [validIp4] }) + const peers = res.Peers + expect(peers).to.exist() + expect(peers.length).to.eql(1) + done() + }) + }) + + it('should return a list of bootstrap peers when called with the default option', (done) => { + ipfs.bootstrap.add(null, { default: true }, (err, res) => { + expect(err).to.not.exist() + const peers = res.Peers + expect(peers).to.exist() + expect(peers.length).to.above(1) + done() + }) + }) + }) +} diff --git a/js/src/bootstrap/index.js b/js/src/bootstrap/index.js new file mode 100644 index 000000000..858d97747 --- /dev/null +++ b/js/src/bootstrap/index.js @@ -0,0 +1,10 @@ +'use strict' +const { createSuite } = require('../utils/suite') + +const tests = { + add: require('./add'), + list: require('./list'), + rm: require('./rm') +} + +module.exports = createSuite(tests) diff --git a/js/src/bootstrap/list.js b/js/src/bootstrap/list.js new file mode 100644 index 000000000..b23ae9618 --- /dev/null +++ b/js/src/bootstrap/list.js @@ -0,0 +1,46 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const 
expect = chai.expect +chai.use(dirtyChai) +const { getDescribe } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const common = createCommon() + + describe('.bootstrap.list', function () { + this.timeout(100 * 1000) + + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should return a list of peers', (done) => { + ipfs.bootstrap.list((err, res) => { + expect(err).to.not.exist() + const peers = res.Peers + expect(peers).to.exist() + done() + }) + }) + }) +} diff --git a/js/src/bootstrap/rm.js b/js/src/bootstrap/rm.js new file mode 100644 index 000000000..e292f164c --- /dev/null +++ b/js/src/bootstrap/rm.js @@ -0,0 +1,75 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const { getDescribe } = require('../utils/mocha') + +const invalidArg = 'this/Is/So/Invalid/' + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const common = createCommon() + + describe('.bootstrap.rm', function () { + this.timeout(100 * 1000) + + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should return an error when called with an invalid arg', 
(done) => { + ipfs.bootstrap.rm(invalidArg, (err) => { + expect(err).to.be.an.instanceof(Error) + done() + }) + }) + + it('should return an empty list because no peers removed when called without an arg or options', (done) => { + ipfs.bootstrap.rm(null, (err, res) => { + expect(err).to.not.exist() + const peers = res.Peers + expect(peers).to.exist() + expect(peers.length).to.eql(0) + done() + }) + }) + + it('should return a list containing the peer removed when called with a valid arg (ip4)', (done) => { + ipfs.bootstrap.rm(null, (err, res) => { + expect(err).to.not.exist() + const peers = res.Peers + expect(peers).to.exist() + expect(peers.length).to.eql(0) + done() + }) + }) + + it('should return a list of all peers removed when all option is passed', (done) => { + ipfs.bootstrap.rm(null, { all: true }, (err, res) => { + expect(err).to.not.exist() + const peers = res.Peers + expect(peers).to.exist() + done() + }) + }) + }) +} From c17e1942cba891355da610e02a1501241f0441c2 Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Mon, 4 Jun 2018 22:03:24 +0100 Subject: [PATCH 05/41] feat(config): modularise config License: MIT Signed-off-by: Alan Shaw --- js/src/config.js | 168 --------------------------------------- js/src/config/get.js | 82 +++++++++++++++++++ js/src/config/index.js | 10 +++ js/src/config/replace.js | 63 +++++++++++++++ js/src/config/set.js | 94 ++++++++++++++++++++++ 5 files changed, 249 insertions(+), 168 deletions(-) delete mode 100644 js/src/config.js create mode 100644 js/src/config/get.js create mode 100644 js/src/config/index.js create mode 100644 js/src/config/replace.js create mode 100644 js/src/config/set.js diff --git a/js/src/config.js b/js/src/config.js deleted file mode 100644 index 1cff56c81..000000000 --- a/js/src/config.js +++ /dev/null @@ -1,168 +0,0 @@ -/* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - -'use strict' - -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const expect = chai.expect 
-chai.use(dirtyChai) - -module.exports = (common) => { - describe('.config', function () { - this.timeout(30 * 1000) - let ipfs - - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) - }) - - after((done) => common.teardown(done)) - - describe('.get', () => { - it('retrieve the whole config', (done) => { - ipfs.config.get((err, config) => { - expect(err).to.not.exist() - expect(config).to.exist() - done() - }) - }) - - it('retrieve a value through a key', (done) => { - ipfs.config.get('Identity.PeerID', (err, peerId) => { - expect(err).to.not.exist() - expect(peerId).to.exist() - done() - }) - }) - - it('retrieve a value through a nested key', (done) => { - ipfs.config.get('Addresses.Swarm', (err, swarmAddrs) => { - expect(err).to.not.exist() - expect(swarmAddrs).to.exist() - done() - }) - }) - - it('fail on non valid key', (done) => { - ipfs.config.get(1234, (err, peerId) => { - expect(err).to.exist() - done() - }) - }) - - it('fail on non exist()ent key', (done) => { - ipfs.config.get('Bananas', (err, peerId) => { - expect(err).to.exist() - done() - }) - }) - - it('Promises support', () => { - return ipfs.config.get() - .then((config) => { - expect(config).to.exist() - }) - }) - }) - - describe('.set', () => { - it('set a new key', (done) => { - ipfs.config.set('Fruit', 'banana', (err) => { - expect(err).to.not.exist() - ipfs.config.get('Fruit', (err, fruit) => { - expect(err).to.not.exist() - expect(fruit).to.equal('banana') - done() - }) - }) - }) - - it('set an already exist()ing key', (done) => { - ipfs.config.set('Fruit', 'morango', (err) => { - expect(err).to.not.exist() - ipfs.config.get('Fruit', (err, fruit) => { - expect(err).to.not.exist() - expect(fruit).to.equal('morango') - done() - 
}) - }) - }) - - it('set a JSON object', (done) => { - const key = 'API.HTTPHeaders.Access-Control-Allow-Origin' - const val = ['http://example.io'] - ipfs.config.set(key, val, function (err) { - expect(err).to.not.exist() - ipfs.config.get(key, function (err, result) { - expect(err).to.not.exist() - expect(result).to.deep.equal(val) - done() - }) - }) - }) - - it('fail on non valid key', (done) => { - ipfs.config.set(Buffer.from('heeey'), '', (err) => { - expect(err).to.exist() - done() - }) - }) - - it('fail on non valid value', (done) => { - ipfs.config.set('Fruit', Buffer.from('abc'), (err) => { - expect(err).to.exist() - done() - }) - }) - - it('Promises support', () => { - return ipfs.config.set('Fruit', 'banana') - .then(() => ipfs.config.get('Fruit')) - .then((fruit) => { - expect(fruit).to.equal('banana') - }) - }) - }) - - // Waiting for fix on go-ipfs - // - https://github.com/ipfs/js-ipfs-api/pull/307#discussion_r69281789 - // - https://github.com/ipfs/go-ipfs/issues/2927 - describe.skip('.replace', () => { - const config = { - Fruit: 'Bananas' - } - - it('replace the whole config', (done) => { - ipfs.config.replace(config, (err) => { - expect(err).to.not.exist() - ipfs.config.get((err, _config) => { - expect(err).to.not.exist() - expect(_config).to.deep.equal(config) - }) - }) - }) - - it('replace to empty config', (done) => { - ipfs.config.replace({}, (err) => { - expect(err).to.not.exist() - ipfs.config.get((err, _config) => { - expect(err).to.not.exist() - expect(_config).to.deep.equal(config) - }) - }) - }) - }) - }) -} diff --git a/js/src/config/get.js b/js/src/config/get.js new file mode 100644 index 000000000..4b2444a4e --- /dev/null +++ b/js/src/config/get.js @@ -0,0 +1,82 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const { getDescribe } = require('../utils/mocha') + 
+module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const common = createCommon() + + describe('.config.get', function () { + this.timeout(30 * 1000) + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should retrieve the whole config', (done) => { + ipfs.config.get((err, config) => { + expect(err).to.not.exist() + expect(config).to.exist() + done() + }) + }) + + it('should retrieve the whole config (promised)', () => { + return ipfs.config.get() + .then((config) => { + expect(config).to.exist() + }) + }) + + it('should retrieve a value through a key', (done) => { + ipfs.config.get('Identity.PeerID', (err, peerId) => { + expect(err).to.not.exist() + expect(peerId).to.exist() + done() + }) + }) + + it('should retrieve a value through a nested key', (done) => { + ipfs.config.get('Addresses.Swarm', (err, swarmAddrs) => { + expect(err).to.not.exist() + expect(swarmAddrs).to.exist() + done() + }) + }) + + it('should fail on non valid key', (done) => { + ipfs.config.get(1234, (err, peerId) => { + expect(err).to.exist() + done() + }) + }) + + it('should fail on non existent key', (done) => { + ipfs.config.get('Bananas', (err, peerId) => { + expect(err).to.exist() + done() + }) + }) + }) +} diff --git a/js/src/config/index.js b/js/src/config/index.js new file mode 100644 index 000000000..d17d34238 --- /dev/null +++ b/js/src/config/index.js @@ -0,0 +1,10 @@ +'use strict' +const { createSuite } = require('../utils/suite') + +const tests = { + get: require('./get'), + set: require('./set'), + replace: require('./replace') +} + +module.exports = createSuite(tests) diff --git a/js/src/config/replace.js 
b/js/src/config/replace.js new file mode 100644 index 000000000..f8db929f9 --- /dev/null +++ b/js/src/config/replace.js @@ -0,0 +1,63 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const { getDescribe } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const common = createCommon() + + describe('.config.replace', function () { + this.timeout(30 * 1000) + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + const config = { + Fruit: 'Bananas' + } + + it('should replace the whole config', (done) => { + ipfs.config.replace(config, (err) => { + expect(err).to.not.exist() + ipfs.config.get((err, _config) => { + expect(err).to.not.exist() + expect(_config).to.deep.equal(config) + done() + }) + }) + }) + + it('should replace to empty config', (done) => { + ipfs.config.replace({}, (err) => { + expect(err).to.not.exist() + ipfs.config.get((err, _config) => { + expect(err).to.not.exist() + expect(_config).to.deep.equal({}) + done() + }) + }) + }) + }) +} diff --git a/js/src/config/set.js b/js/src/config/set.js new file mode 100644 index 000000000..ee29d7d3d --- /dev/null +++ b/js/src/config/set.js @@ -0,0 +1,94 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const { getDescribe } = require('../utils/mocha') + +module.exports = (createCommon, 
options) => { + const describe = getDescribe(options) + const common = createCommon() + + describe('.config.set', function () { + this.timeout(30 * 1000) + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should set a new key', (done) => { + ipfs.config.set('Fruit', 'banana', (err) => { + expect(err).to.not.exist() + ipfs.config.get('Fruit', (err, fruit) => { + expect(err).to.not.exist() + expect(fruit).to.equal('banana') + done() + }) + }) + }) + + it('should set a new key (promised)', () => { + return ipfs.config.set('Fruit', 'banana') + .then(() => ipfs.config.get('Fruit')) + .then((fruit) => { + expect(fruit).to.equal('banana') + }) + }) + + it('should set an already existing key', (done) => { + ipfs.config.set('Fruit', 'morango', (err) => { + expect(err).to.not.exist() + ipfs.config.get('Fruit', (err, fruit) => { + expect(err).to.not.exist() + expect(fruit).to.equal('morango') + done() + }) + }) + }) + + it('should set a JSON object', (done) => { + const key = 'API.HTTPHeaders.Access-Control-Allow-Origin' + const val = ['http://example.io'] + ipfs.config.set(key, val, function (err) { + expect(err).to.not.exist() + ipfs.config.get(key, function (err, result) { + expect(err).to.not.exist() + expect(result).to.deep.equal(val) + done() + }) + }) + }) + + it('should fail on non valid key', (done) => { + ipfs.config.set(Buffer.from('heeey'), '', (err) => { + expect(err).to.exist() + done() + }) + }) + + it('should fail on non valid value', (done) => { + ipfs.config.set('Fruit', Buffer.from('abc'), (err) => { + expect(err).to.exist() + done() + }) + }) + }) +} From bf9a6714247d18db9e44b2b18cdae26479f51a98 Mon Sep 17 00:00:00 
2001 From: Alan Shaw Date: Tue, 5 Jun 2018 11:21:24 +0100 Subject: [PATCH 06/41] feat(dag): modularise dag License: MIT Signed-off-by: Alan Shaw --- js/src/dag.js | 450 -------------------------------------------- js/src/dag/get.js | 240 +++++++++++++++++++++++ js/src/dag/index.js | 10 + js/src/dag/put.js | 133 +++++++++++++ js/src/dag/tree.js | 141 ++++++++++++++ 5 files changed, 524 insertions(+), 450 deletions(-) delete mode 100644 js/src/dag.js create mode 100644 js/src/dag/get.js create mode 100644 js/src/dag/index.js create mode 100644 js/src/dag/put.js create mode 100644 js/src/dag/tree.js diff --git a/js/src/dag.js b/js/src/dag.js deleted file mode 100644 index d337bd045..000000000 --- a/js/src/dag.js +++ /dev/null @@ -1,450 +0,0 @@ -/* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - -'use strict' - -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) -const series = require('async/series') -const pull = require('pull-stream') -const dagPB = require('ipld-dag-pb') -const DAGNode = dagPB.DAGNode -const dagCBOR = require('ipld-dag-cbor') -const CID = require('cids') -const { spawnNodeWithId } = require('./utils/spawn') - -module.exports = (common) => { - describe('.dag', () => { - let ipfs - let withGo - - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - - spawnNodeWithId(factory, (err, node) => { - expect(err).to.not.exist() - ipfs = node - withGo = node.peerId.agentVersion.startsWith('go-ipfs') - done() - }) - }) - }) - - after((done) => common.teardown(done)) - - let pbNode - let cborNode - - before((done) => { - const someData = Buffer.from('some data') - - pbNode = DAGNode.create(someData, (err, node) => { - expect(err).to.not.exist() - pbNode = node - done() - }) - - cborNode = { - data: 
someData - } - }) - - describe('.put', () => { - it('dag-pb with default hash func (sha2-256)', (done) => { - ipfs.dag.put(pbNode, { - format: 'dag-pb', - hashAlg: 'sha2-256' - }, done) - }) - - it('dag-pb with custom hash func (sha3-512)', (done) => { - ipfs.dag.put(pbNode, { - format: 'dag-pb', - hashAlg: 'sha3-512' - }, done) - }) - - // This works because dag-cbor will just treat pbNode as a regular object - it.skip('dag-pb node with wrong multicodec', (done) => { - ipfs.dag.put(pbNode, 'dag-cbor', 'sha3-512', (err) => { - expect(err).to.exist() - done() - }) - }) - - it('dag-cbor with default hash func (sha2-256)', (done) => { - ipfs.dag.put(cborNode, { - format: 'dag-cbor', - hashAlg: 'sha2-256' - }, done) - }) - - it('dag-cbor with custom hash func (sha3-512)', (done) => { - ipfs.dag.put(cborNode, { - format: 'dag-cbor', - hashAlg: 'sha3-512' - }, done) - }) - - it('dag-cbor node with wrong multicodec', function (done) { - // This works in go-ipfs because dag-pb will serialize any object. 
If - // the object has neither a `data` nor `links` field it's serialized - // as an empty object - if (withGo) { - this.skip() - } - ipfs.dag.put(cborNode, { - format: 'dag-pb', - hashAlg: 'sha3-512' - }, (err) => { - expect(err).to.exist() - done() - }) - }) - - it('returns the cid', (done) => { - ipfs.dag.put(cborNode, { - format: 'dag-cbor', - hashAlg: 'sha2-256' - }, (err, cid) => { - expect(err).to.not.exist() - expect(cid).to.exist() - expect(CID.isCID(cid)).to.equal(true) - dagCBOR.util.cid(cborNode, (err, _cid) => { - expect(err).to.not.exist() - expect(cid.buffer).to.eql(_cid.buffer) - done() - }) - }) - }) - - it.skip('pass the cid instead of format and hashAlg', (done) => {}) - - // TODO it.skip('Promises support', (done) => {}) - }) - - describe('.get', () => { - let pbNode - let cborNode - - let nodePb - let nodeCbor - let cidPb - let cidCbor - - before((done) => { - series([ - (cb) => { - const someData = Buffer.from('some other data') - - pbNode = DAGNode.create(someData, (err, node) => { - expect(err).to.not.exist() - pbNode = node - cb() - }) - - cborNode = { - data: someData - } - }, - (cb) => { - dagPB.DAGNode.create(Buffer.from('I am inside a Protobuf'), (err, node) => { - expect(err).to.not.exist() - nodePb = node - cb() - }) - }, - (cb) => { - dagPB.util.cid(nodePb, (err, cid) => { - expect(err).to.not.exist() - cidPb = cid - cb() - }) - }, - (cb) => { - nodeCbor = { - someData: 'I am inside a Cbor object', - pb: { '/': cidPb.toBaseEncodedString() } - } - - dagCBOR.util.cid(nodeCbor, (err, cid) => { - expect(err).to.not.exist() - cidCbor = cid - cb() - }) - } - ], store) - - function store () { - pull( - pull.values([ - { node: nodePb, multicodec: 'dag-pb', hashAlg: 'sha2-256' }, - { node: nodeCbor, multicodec: 'dag-cbor', hashAlg: 'sha2-256' } - ]), - pull.asyncMap((el, cb) => { - ipfs.dag.put(el.node, { - format: el.multicodec, - hashAlg: el.hashAlg - }, cb) - }), - pull.onEnd(done) - ) - } - }) - - it('dag-pb node', (done) => { - 
ipfs.dag.put(pbNode, { - format: 'dag-pb', - hashAlg: 'sha2-256' - }, (err, cid) => { - expect(err).to.not.exist() - ipfs.dag.get(cid, (err, result) => { - expect(err).to.not.exist() - const node = result.value - expect(pbNode.toJSON()).to.eql(node.toJSON()) - done() - }) - }) - }) - - it('dag-cbor node', (done) => { - ipfs.dag.put(cborNode, { - format: 'dag-cbor', - hashAlg: 'sha2-256' - }, (err, cid) => { - expect(err).to.not.exist() - ipfs.dag.get(cid, (err, result) => { - expect(err).to.not.exist() - - const node = result.value - expect(cborNode).to.eql(node) - done() - }) - }) - }) - - describe('with path', () => { - it('dag-pb get the node', (done) => { - ipfs.dag.get(cidPb, '/', (err, result) => { - expect(err).to.not.exist() - - const node = result.value - - dagPB.util.cid(node, (err, cid) => { - expect(err).to.not.exist() - expect(cid).to.eql(cidPb) - done() - }) - }) - }) - - it('dag-pb local scope', function (done) { - // TODO vmx 2018-02-22: Currently not supported in go-ipfs, it might - // be possible once https://github.com/ipfs/go-ipfs/issues/4728 is - // done - if (withGo) { - this.skip() - } - ipfs.dag.get(cidPb, 'Data', (err, result) => { - expect(err).to.not.exist() - expect(result.value).to.eql(Buffer.from('I am inside a Protobuf')) - done() - }) - }) - - it.skip('dag-pb one level', (done) => {}) - it.skip('dag-pb two levels', (done) => {}) - - it('dag-cbor get the node', (done) => { - ipfs.dag.get(cidCbor, '/', (err, result) => { - expect(err).to.not.exist() - - const node = result.value - - dagCBOR.util.cid(node, (err, cid) => { - expect(err).to.not.exist() - expect(cid).to.eql(cidCbor) - done() - }) - }) - }) - - it('dag-cbor local scope', (done) => { - ipfs.dag.get(cidCbor, 'someData', (err, result) => { - expect(err).to.not.exist() - expect(result.value).to.eql('I am inside a Cbor object') - done() - }) - }) - - it.skip('dag-cbor one level', (done) => {}) - it.skip('dag-cbor two levels', (done) => {}) - it.skip('from dag-pb to dag-cbor', 
(done) => {}) - - it('from dag-cbor to dag-pb', function (done) { - // TODO vmx 2018-02-22: Currently not supported in go-ipfs, it might - // be possible once https://github.com/ipfs/go-ipfs/issues/4728 is - // done - if (withGo) { - this.skip() - } - ipfs.dag.get(cidCbor, 'pb/Data', (err, result) => { - expect(err).to.not.exist() - expect(result.value).to.eql(Buffer.from('I am inside a Protobuf')) - done() - }) - }) - - it('CID String', (done) => { - const cidCborStr = cidCbor.toBaseEncodedString() - - ipfs.dag.get(cidCborStr, (err, result) => { - expect(err).to.not.exist() - - const node = result.value - - dagCBOR.util.cid(node, (err, cid) => { - expect(err).to.not.exist() - expect(cid).to.eql(cidCbor) - done() - }) - }) - }) - - it('CID String + path', function (done) { - // TODO vmx 2018-02-22: Currently not supported in go-ipfs, it might - // be possible once https://github.com/ipfs/go-ipfs/issues/4728 is - // done - if (withGo) { - this.skip() - } - const cidCborStr = cidCbor.toBaseEncodedString() - - ipfs.dag.get(cidCborStr + '/pb/Data', (err, result) => { - expect(err).to.not.exist() - expect(result.value).to.eql(Buffer.from('I am inside a Protobuf')) - done() - }) - }) - }) - }) - - describe('.tree', function () { - let nodePb - let nodeCbor - let cidPb - let cidCbor - - before(function (done) { - // TODO vmx 2018-02-22: Currently the tree API is not exposed in go-ipfs - if (withGo) { - this.skip() - } - series([ - (cb) => { - dagPB.DAGNode.create(Buffer.from('I am inside a Protobuf'), (err, node) => { - expect(err).to.not.exist() - nodePb = node - cb() - }) - }, - (cb) => { - dagPB.util.cid(nodePb, (err, cid) => { - expect(err).to.not.exist() - cidPb = cid - cb() - }) - }, - (cb) => { - nodeCbor = { - someData: 'I am inside a Cbor object', - pb: { '/': cidPb.toBaseEncodedString() } - } - - dagCBOR.util.cid(nodeCbor, (err, cid) => { - expect(err).to.not.exist() - cidCbor = cid - cb() - }) - } - ], store) - - function store () { - pull( - pull.values([ - { 
node: nodePb, multicodec: 'dag-pb', hashAlg: 'sha2-256' }, - { node: nodeCbor, multicodec: 'dag-cbor', hashAlg: 'sha2-256' } - ]), - pull.asyncMap((el, cb) => { - ipfs.dag.put(el.node, { - format: el.multicodec, - hashAlg: el.hashAlg - }, cb) - }), - pull.onEnd(done) - ) - } - }) - - it('.tree with CID', (done) => { - ipfs.dag.tree(cidCbor, (err, paths) => { - expect(err).to.not.exist() - expect(paths).to.eql([ - 'pb', - 'someData' - ]) - done() - }) - }) - - it('.tree with CID and path', (done) => { - ipfs.dag.tree(cidCbor, 'someData', (err, paths) => { - expect(err).to.not.exist() - expect(paths).to.eql([]) - done() - }) - }) - - it('.tree with CID and path as String', (done) => { - const cidCborStr = cidCbor.toBaseEncodedString() - - ipfs.dag.tree(cidCborStr + '/someData', (err, paths) => { - expect(err).to.not.exist() - expect(paths).to.eql([]) - done() - }) - }) - - it('.tree with CID recursive (accross different formats)', (done) => { - ipfs.dag.tree(cidCbor, { recursive: true }, (err, paths) => { - expect(err).to.not.exist() - expect(paths).to.eql([ - 'pb', - 'someData', - 'pb/Links', - 'pb/Data' - ]) - done() - }) - }) - - it('.tree with CID and path recursive', (done) => { - ipfs.dag.tree(cidCbor, 'pb', { recursive: true }, (err, paths) => { - expect(err).to.not.exist() - expect(paths).to.eql([ - 'Links', - 'Data' - ]) - done() - }) - }) - }) - }) -} diff --git a/js/src/dag/get.js b/js/src/dag/get.js new file mode 100644 index 000000000..91154b907 --- /dev/null +++ b/js/src/dag/get.js @@ -0,0 +1,240 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const { series, eachSeries } = require('async') +const dagPB = require('ipld-dag-pb') +const DAGNode = dagPB.DAGNode +const dagCBOR = require('ipld-dag-cbor') +const { spawnNodeWithId } = require('../utils/spawn') +const { getDescribe } = 
require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const common = createCommon() + + describe('.dag.get', () => { + let ipfs + let withGo + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + + spawnNodeWithId(factory, (err, node) => { + expect(err).to.not.exist() + ipfs = node + withGo = node.peerId.agentVersion.startsWith('go-ipfs') + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + let pbNode + let cborNode + let nodePb + let nodeCbor + let cidPb + let cidCbor + + before((done) => { + series([ + (cb) => { + const someData = Buffer.from('some other data') + + pbNode = DAGNode.create(someData, (err, node) => { + expect(err).to.not.exist() + pbNode = node + cb() + }) + + cborNode = { + data: someData + } + }, + (cb) => { + dagPB.DAGNode.create(Buffer.from('I am inside a Protobuf'), (err, node) => { + expect(err).to.not.exist() + nodePb = node + cb() + }) + }, + (cb) => { + dagPB.util.cid(nodePb, (err, cid) => { + expect(err).to.not.exist() + cidPb = cid + cb() + }) + }, + (cb) => { + nodeCbor = { + someData: 'I am inside a Cbor object', + pb: { '/': cidPb.toBaseEncodedString() } + } + + dagCBOR.util.cid(nodeCbor, (err, cid) => { + expect(err).to.not.exist() + cidCbor = cid + cb() + }) + }, + (cb) => { + eachSeries([ + { node: nodePb, multicodec: 'dag-pb', hashAlg: 'sha2-256' }, + { node: nodeCbor, multicodec: 'dag-cbor', hashAlg: 'sha2-256' } + ], (el, cb) => { + ipfs.dag.put(el.node, { + format: el.multicodec, + hashAlg: el.hashAlg + }, cb) + }, cb) + } + ], done) + }) + + it('should get a dag-pb node', (done) => { + ipfs.dag.put(pbNode, { + format: 'dag-pb', + hashAlg: 'sha2-256' + }, (err, cid) => { + expect(err).to.not.exist() + ipfs.dag.get(cid, (err, result) => { + expect(err).to.not.exist() + const 
node = result.value + expect(pbNode.toJSON()).to.eql(node.toJSON()) + done() + }) + }) + }) + + it('should get a dag-cbor node', (done) => { + ipfs.dag.put(cborNode, { + format: 'dag-cbor', + hashAlg: 'sha2-256' + }, (err, cid) => { + expect(err).to.not.exist() + ipfs.dag.get(cid, (err, result) => { + expect(err).to.not.exist() + + const node = result.value + expect(cborNode).to.eql(node) + done() + }) + }) + }) + + it('should get a dag-pb node with path', (done) => { + ipfs.dag.get(cidPb, '/', (err, result) => { + expect(err).to.not.exist() + + const node = result.value + + dagPB.util.cid(node, (err, cid) => { + expect(err).to.not.exist() + expect(cid).to.eql(cidPb) + done() + }) + }) + }) + + it('should get a dag-pb node local value', function (done) { + // TODO vmx 2018-02-22: Currently not supported in go-ipfs, it might + // be possible once https://github.com/ipfs/go-ipfs/issues/4728 is + // done + if (withGo) { + this.skip() + } + ipfs.dag.get(cidPb, 'Data', (err, result) => { + expect(err).to.not.exist() + expect(result.value).to.eql(Buffer.from('I am inside a Protobuf')) + done() + }) + }) + + it.skip('should get a dag-pb node value one level deep', (done) => {}) + it.skip('should get a dag-pb node value two levels deep', (done) => {}) + + it('should get a dag-cbor node with path', (done) => { + ipfs.dag.get(cidCbor, '/', (err, result) => { + expect(err).to.not.exist() + + const node = result.value + + dagCBOR.util.cid(node, (err, cid) => { + expect(err).to.not.exist() + expect(cid).to.eql(cidCbor) + done() + }) + }) + }) + + it('should get a dag-cbor node local value', (done) => { + ipfs.dag.get(cidCbor, 'someData', (err, result) => { + expect(err).to.not.exist() + expect(result.value).to.eql('I am inside a Cbor object') + done() + }) + }) + + it.skip('should get dag-cbor node value one level deep', (done) => {}) + it.skip('should get dag-cbor node value two levels deep', (done) => {}) + it.skip('should get dag-cbor value via dag-pb node', (done) => {}) + 
+ it('should get dag-pb value via dag-cbor node', function (done) { + // TODO vmx 2018-02-22: Currently not supported in go-ipfs, it might + // be possible once https://github.com/ipfs/go-ipfs/issues/4728 is + // done + if (withGo) { + this.skip() + } + ipfs.dag.get(cidCbor, 'pb/Data', (err, result) => { + expect(err).to.not.exist() + expect(result.value).to.eql(Buffer.from('I am inside a Protobuf')) + done() + }) + }) + + it('should get by CID string', (done) => { + const cidCborStr = cidCbor.toBaseEncodedString() + + ipfs.dag.get(cidCborStr, (err, result) => { + expect(err).to.not.exist() + + const node = result.value + + dagCBOR.util.cid(node, (err, cid) => { + expect(err).to.not.exist() + expect(cid).to.eql(cidCbor) + done() + }) + }) + }) + + it('should get by CID string + path', function (done) { + // TODO vmx 2018-02-22: Currently not supported in go-ipfs, it might + // be possible once https://github.com/ipfs/go-ipfs/issues/4728 is + // done + if (withGo) { + this.skip() + } + const cidCborStr = cidCbor.toBaseEncodedString() + + ipfs.dag.get(cidCborStr + '/pb/Data', (err, result) => { + expect(err).to.not.exist() + expect(result.value).to.eql(Buffer.from('I am inside a Protobuf')) + done() + }) + }) + }) +} diff --git a/js/src/dag/index.js b/js/src/dag/index.js new file mode 100644 index 000000000..217ee5370 --- /dev/null +++ b/js/src/dag/index.js @@ -0,0 +1,10 @@ +'use strict' +const { createSuite } = require('../utils/suite') + +const tests = { + get: require('./get'), + put: require('./put'), + tree: require('./tree') +} + +module.exports = createSuite(tests) diff --git a/js/src/dag/put.js b/js/src/dag/put.js new file mode 100644 index 000000000..0e87eb53f --- /dev/null +++ b/js/src/dag/put.js @@ -0,0 +1,133 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const dagPB = 
require('ipld-dag-pb') +const DAGNode = dagPB.DAGNode +const dagCBOR = require('ipld-dag-cbor') +const CID = require('cids') +const { spawnNodeWithId } = require('../utils/spawn') +const { getDescribe } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const common = createCommon() + + describe('.dag.put', () => { + let ipfs + let withGo + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + + spawnNodeWithId(factory, (err, node) => { + expect(err).to.not.exist() + ipfs = node + withGo = node.peerId.agentVersion.startsWith('go-ipfs') + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + let pbNode + let cborNode + + before((done) => { + const someData = Buffer.from('some data') + + pbNode = DAGNode.create(someData, (err, node) => { + expect(err).to.not.exist() + pbNode = node + done() + }) + + cborNode = { + data: someData + } + }) + + it('should put dag-pb with default hash func (sha2-256)', (done) => { + ipfs.dag.put(pbNode, { + format: 'dag-pb', + hashAlg: 'sha2-256' + }, done) + }) + + it('should put dag-pb with custom hash func (sha3-512)', (done) => { + ipfs.dag.put(pbNode, { + format: 'dag-pb', + hashAlg: 'sha3-512' + }, done) + }) + + // This works because dag-cbor will just treat pbNode as a regular object + it.skip('should not put dag-pb node with wrong multicodec', (done) => { + ipfs.dag.put(pbNode, 'dag-cbor', 'sha3-512', (err) => { + expect(err).to.exist() + done() + }) + }) + + it('should put dag-cbor with default hash func (sha2-256)', (done) => { + ipfs.dag.put(cborNode, { + format: 'dag-cbor', + hashAlg: 'sha2-256' + }, done) + }) + + it('should put dag-cbor with custom hash func (sha3-512)', (done) => { + ipfs.dag.put(cborNode, { + format: 'dag-cbor', + hashAlg: 'sha3-512' + }, done) + }) + 
+ it('should not put dag-cbor node with wrong multicodec', function (done) { + // This works in go-ipfs because dag-pb will serialize any object. If + // the object has neither a `data` nor `links` field it's serialized + // as an empty object + if (withGo) { + this.skip() + } + ipfs.dag.put(cborNode, { + format: 'dag-pb', + hashAlg: 'sha3-512' + }, (err) => { + expect(err).to.exist() + done() + }) + }) + + it('should return the cid', (done) => { + ipfs.dag.put(cborNode, { + format: 'dag-cbor', + hashAlg: 'sha2-256' + }, (err, cid) => { + expect(err).to.not.exist() + expect(cid).to.exist() + expect(CID.isCID(cid)).to.equal(true) + dagCBOR.util.cid(cborNode, (err, _cid) => { + expect(err).to.not.exist() + expect(cid.buffer).to.eql(_cid.buffer) + done() + }) + }) + }) + + it.skip('should put by passing the cid instead of format and hashAlg', (done) => {}) + + // TODO it.skip('Promises support', (done) => {}) + }) +} diff --git a/js/src/dag/tree.js b/js/src/dag/tree.js new file mode 100644 index 000000000..e57c71efe --- /dev/null +++ b/js/src/dag/tree.js @@ -0,0 +1,141 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const { series, eachSeries } = require('async') +const dagPB = require('ipld-dag-pb') +const dagCBOR = require('ipld-dag-cbor') +const { spawnNodeWithId } = require('../utils/spawn') +const { getDescribe } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const common = createCommon() + + describe('.dag.tree', () => { + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + + spawnNodeWithId(factory, (err, node) => { + 
expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + let nodePb + let nodeCbor + let cidPb + let cidCbor + + before(function (done) { + series([ + (cb) => { + dagPB.DAGNode.create(Buffer.from('I am inside a Protobuf'), (err, node) => { + expect(err).to.not.exist() + nodePb = node + cb() + }) + }, + (cb) => { + dagPB.util.cid(nodePb, (err, cid) => { + expect(err).to.not.exist() + cidPb = cid + cb() + }) + }, + (cb) => { + nodeCbor = { + someData: 'I am inside a Cbor object', + pb: { '/': cidPb.toBaseEncodedString() } + } + + dagCBOR.util.cid(nodeCbor, (err, cid) => { + expect(err).to.not.exist() + cidCbor = cid + cb() + }) + }, + (cb) => { + eachSeries([ + { node: nodePb, multicodec: 'dag-pb', hashAlg: 'sha2-256' }, + { node: nodeCbor, multicodec: 'dag-cbor', hashAlg: 'sha2-256' } + ], (el, cb) => { + ipfs.dag.put(el.node, { + format: el.multicodec, + hashAlg: el.hashAlg + }, cb) + }, cb) + } + ], done) + }) + + it('should get tree with CID', (done) => { + ipfs.dag.tree(cidCbor, (err, paths) => { + expect(err).to.not.exist() + expect(paths).to.eql([ + 'pb', + 'someData' + ]) + done() + }) + }) + + it('should get tree with CID and path', (done) => { + ipfs.dag.tree(cidCbor, 'someData', (err, paths) => { + expect(err).to.not.exist() + expect(paths).to.eql([]) + done() + }) + }) + + it('should get tree with CID and path as String', (done) => { + const cidCborStr = cidCbor.toBaseEncodedString() + + ipfs.dag.tree(cidCborStr + '/someData', (err, paths) => { + expect(err).to.not.exist() + expect(paths).to.eql([]) + done() + }) + }) + + it('should get tree with CID recursive (accross different formats)', (done) => { + ipfs.dag.tree(cidCbor, { recursive: true }, (err, paths) => { + expect(err).to.not.exist() + expect(paths).to.eql([ + 'pb', + 'someData', + 'pb/Links', + 'pb/Data' + ]) + done() + }) + }) + + it('should get tree with CID and path recursive', (done) => { + ipfs.dag.tree(cidCbor, 'pb', { recursive: true 
}, (err, paths) => { + expect(err).to.not.exist() + expect(paths).to.eql([ + 'Links', + 'Data' + ]) + done() + }) + }) + }) +} From 72d03a8d5c27fc0a7b55ac47306b96afd4daf48b Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Tue, 5 Jun 2018 12:13:56 +0100 Subject: [PATCH 07/41] feat: skip or only specific tests License: MIT Signed-off-by: Alan Shaw --- README.md | 28 ++++++++++++++++++++++------ js/src/block/get.js | 3 ++- js/src/block/put.js | 3 ++- js/src/block/stat.js | 3 ++- js/src/bootstrap/add.js | 3 ++- js/src/bootstrap/list.js | 3 ++- js/src/bootstrap/rm.js | 3 ++- js/src/config/get.js | 3 ++- js/src/config/replace.js | 3 ++- js/src/config/set.js | 3 ++- js/src/dag/get.js | 3 ++- js/src/dag/put.js | 3 ++- js/src/dag/tree.js | 3 ++- js/src/utils/mocha.js | 28 +++++++++++++++++++++++++--- js/src/utils/suite.js | 15 +++++++++++++-- 15 files changed, 84 insertions(+), 23 deletions(-) diff --git a/README.md b/README.md index 419bb2dae..165d246d0 100644 --- a/README.md +++ b/README.md @@ -103,25 +103,41 @@ tests.repo.version(createCommon) #### Skipping tests ```js -tests.repo.version(createCommon) -tests.repo.stat(createCommon) tests.repo.gc(createCommon, { skip: true }) // pass an options object to skip these tests // OR, at the subsystem level -tests.repo(createCommon, { skip: ['gc'] }) +tests.repo(createCommon, { skip: ['gc'] }) // skips ALL the repo.gc tests +``` + +##### Skipping specific tests + +```js +tests.repo.gc(createCommon, { skip: ['should do a thing'] }) // named test(s) to skip + +// OR, at the subsystem level + +tests.repo(createCommon, { skip: ['should do a thing'] }) ``` #### Running only some tests ```js -tests.repo.version(createCommon) -tests.repo.stat(createCommon) tests.repo.gc(createCommon, { only: true }) // pass an options object to run only these tests // OR, at the subsystem level -tests.repo(createCommon, { only: ['gc'] }) +tests.repo(createCommon, { only: ['gc'] }) // runs only ALL the repo.gc tests +``` + +##### Running only specific 
tests + +```js +tests.repo.gc(createCommon, { only: ['should do a thing'] }) // only run these named test(s) + +// OR, at the subsystem level + +tests.repo(createCommon, { only: ['should do a thing'] }) ``` ### Go diff --git a/js/src/block/get.js b/js/src/block/get.js index d25c93123..944c50e3b 100644 --- a/js/src/block/get.js +++ b/js/src/block/get.js @@ -11,10 +11,11 @@ const multihash = require('multihashes') const CID = require('cids') const Buffer = require('safe-buffer').Buffer const auto = require('async/auto') -const { getDescribe } = require('../utils/mocha') +const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) + const it = getIt(options) const common = createCommon() describe('.block.get', function () { diff --git a/js/src/block/put.js b/js/src/block/put.js index 10474d4e6..c6464045c 100644 --- a/js/src/block/put.js +++ b/js/src/block/put.js @@ -11,10 +11,11 @@ const Block = require('ipfs-block') const multihash = require('multihashes') const CID = require('cids') const Buffer = require('safe-buffer').Buffer -const { getDescribe } = require('../utils/mocha') +const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) + const it = getIt(options) const common = createCommon() describe('.block.put', () => { diff --git a/js/src/block/stat.js b/js/src/block/stat.js index 6fe605149..2137c194e 100644 --- a/js/src/block/stat.js +++ b/js/src/block/stat.js @@ -9,10 +9,11 @@ const expect = chai.expect chai.use(dirtyChai) const CID = require('cids') const auto = require('async/auto') -const { getDescribe } = require('../utils/mocha') +const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) + const it = getIt(options) const common = createCommon() describe('.block.stat', () => { diff --git a/js/src/bootstrap/add.js 
b/js/src/bootstrap/add.js index b060a07aa..e21a0bf44 100644 --- a/js/src/bootstrap/add.js +++ b/js/src/bootstrap/add.js @@ -6,13 +6,14 @@ const chai = require('chai') const dirtyChai = require('dirty-chai') const expect = chai.expect chai.use(dirtyChai) -const { getDescribe } = require('../utils/mocha') +const { getDescribe, getIt } = require('../utils/mocha') const invalidArg = 'this/Is/So/Invalid/' const validIp4 = '/ip4/104.236.176.52/tcp/4001/ipfs/QmSoLnSGccFuZQJzRadHn95W2CrSFmZuTdDWP8HXaHca9z' module.exports = (createCommon, options) => { const describe = getDescribe(options) + const it = getIt(options) const common = createCommon() describe('.bootstrap.add', function () { diff --git a/js/src/bootstrap/list.js b/js/src/bootstrap/list.js index b23ae9618..1c8ad0be0 100644 --- a/js/src/bootstrap/list.js +++ b/js/src/bootstrap/list.js @@ -6,10 +6,11 @@ const chai = require('chai') const dirtyChai = require('dirty-chai') const expect = chai.expect chai.use(dirtyChai) -const { getDescribe } = require('../utils/mocha') +const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) + const it = getIt(options) const common = createCommon() describe('.bootstrap.list', function () { diff --git a/js/src/bootstrap/rm.js b/js/src/bootstrap/rm.js index e292f164c..d83d60e1e 100644 --- a/js/src/bootstrap/rm.js +++ b/js/src/bootstrap/rm.js @@ -6,12 +6,13 @@ const chai = require('chai') const dirtyChai = require('dirty-chai') const expect = chai.expect chai.use(dirtyChai) -const { getDescribe } = require('../utils/mocha') +const { getDescribe, getIt } = require('../utils/mocha') const invalidArg = 'this/Is/So/Invalid/' module.exports = (createCommon, options) => { const describe = getDescribe(options) + const it = getIt(options) const common = createCommon() describe('.bootstrap.rm', function () { diff --git a/js/src/config/get.js b/js/src/config/get.js index 4b2444a4e..2192b56d5 100644 --- 
a/js/src/config/get.js +++ b/js/src/config/get.js @@ -7,10 +7,11 @@ const chai = require('chai') const dirtyChai = require('dirty-chai') const expect = chai.expect chai.use(dirtyChai) -const { getDescribe } = require('../utils/mocha') +const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) + const it = getIt(options) const common = createCommon() describe('.config.get', function () { diff --git a/js/src/config/replace.js b/js/src/config/replace.js index f8db929f9..978e9faeb 100644 --- a/js/src/config/replace.js +++ b/js/src/config/replace.js @@ -7,10 +7,11 @@ const chai = require('chai') const dirtyChai = require('dirty-chai') const expect = chai.expect chai.use(dirtyChai) -const { getDescribe } = require('../utils/mocha') +const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) + const it = getIt(options) const common = createCommon() describe('.config.replace', function () { diff --git a/js/src/config/set.js b/js/src/config/set.js index ee29d7d3d..796d0c244 100644 --- a/js/src/config/set.js +++ b/js/src/config/set.js @@ -7,10 +7,11 @@ const chai = require('chai') const dirtyChai = require('dirty-chai') const expect = chai.expect chai.use(dirtyChai) -const { getDescribe } = require('../utils/mocha') +const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) + const it = getIt(options) const common = createCommon() describe('.config.set', function () { diff --git a/js/src/dag/get.js b/js/src/dag/get.js index 91154b907..da8140fb5 100644 --- a/js/src/dag/get.js +++ b/js/src/dag/get.js @@ -12,10 +12,11 @@ const dagPB = require('ipld-dag-pb') const DAGNode = dagPB.DAGNode const dagCBOR = require('ipld-dag-cbor') const { spawnNodeWithId } = require('../utils/spawn') -const { getDescribe } = require('../utils/mocha') 
+const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) + const it = getIt(options) const common = createCommon() describe('.dag.get', () => { diff --git a/js/src/dag/put.js b/js/src/dag/put.js index 0e87eb53f..65339d81b 100644 --- a/js/src/dag/put.js +++ b/js/src/dag/put.js @@ -12,10 +12,11 @@ const DAGNode = dagPB.DAGNode const dagCBOR = require('ipld-dag-cbor') const CID = require('cids') const { spawnNodeWithId } = require('../utils/spawn') -const { getDescribe } = require('../utils/mocha') +const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) + const it = getIt(options) const common = createCommon() describe('.dag.put', () => { diff --git a/js/src/dag/tree.js b/js/src/dag/tree.js index e57c71efe..ebe615869 100644 --- a/js/src/dag/tree.js +++ b/js/src/dag/tree.js @@ -11,10 +11,11 @@ const { series, eachSeries } = require('async') const dagPB = require('ipld-dag-pb') const dagCBOR = require('ipld-dag-cbor') const { spawnNodeWithId } = require('../utils/spawn') -const { getDescribe } = require('../utils/mocha') +const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) + const it = getIt(options) const common = createCommon() describe('.dag.tree', () => { diff --git a/js/src/utils/mocha.js b/js/src/utils/mocha.js index af3981ace..ef25ee6c9 100644 --- a/js/src/utils/mocha.js +++ b/js/src/utils/mocha.js @@ -1,10 +1,32 @@ /* eslint-env mocha */ -// Get a describe function that is optionally 'skipped' or 'onlyed' +// Get a "describe" function that is optionally 'skipped' or 'onlyed' function getDescribe (config) { - if (config && config.skip) return describe.skip - if (config && config.only) return describe.only + if (config && config.skip === true) return describe.skip + if (config && config.only === true) return 
describe.only return describe } module.exports.getDescribe = getDescribe + +// Get an "it" function that is optionally 'skipped' or 'onlyed' +function getIt (config) { + const _it = (name, impl) => { + if (config && Array.isArray(config.skip)) { + if (config.skip.includes(name)) return it.skip(name, impl) + } + + if (config && Array.isArray(config.only)) { + if (config.only.includes(name)) return it.only(name, impl) + } + + it(name, impl) + } + + _it.skip = it.skip + _it.only = it.only + + return _it +} + +module.exports.getIt = getIt diff --git a/js/src/utils/suite.js b/js/src/utils/suite.js index 0ad2cdbab..92db4e4af 100644 --- a/js/src/utils/suite.js +++ b/js/src/utils/suite.js @@ -2,8 +2,19 @@ function createSuite (tests) { const suite = (createCommon, options) => { Object.keys(tests).forEach(t => { const opts = Object.assign({}, options) - opts.skip = Array.isArray(opts.skip) ? opts.skip.includes(t) : opts.skip - opts.only = Array.isArray(opts.only) ? opts.only.includes(t) : opts.only + + if (Array.isArray(opts.skip)) { + if (opts.skip.includes(t)) { + opts.skip = true + } + } + + if (Array.isArray(opts.only)) { + if (opts.only.includes(t)) { + opts.only = true + } + } + tests[t](createCommon, opts) }) } From 3cd7f4ed461c7b9537740ff9b3c5219d7deaa59c Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Tue, 5 Jun 2018 12:24:46 +0100 Subject: [PATCH 08/41] fix: removes implementation specific skips License: MIT Signed-off-by: Alan Shaw --- js/src/dag/get.js | 20 -------------------- js/src/dag/put.js | 8 -------- 2 files changed, 28 deletions(-) diff --git a/js/src/dag/get.js b/js/src/dag/get.js index da8140fb5..d92ad0ae8 100644 --- a/js/src/dag/get.js +++ b/js/src/dag/get.js @@ -21,7 +21,6 @@ module.exports = (createCommon, options) => { describe('.dag.get', () => { let ipfs - let withGo before(function (done) { // CI takes longer to instantiate the daemon, so we need to increase the @@ -34,7 +33,6 @@ module.exports = (createCommon, options) => { 
spawnNodeWithId(factory, (err, node) => { expect(err).to.not.exist() ipfs = node - withGo = node.peerId.agentVersion.startsWith('go-ipfs') done() }) }) @@ -150,12 +148,6 @@ module.exports = (createCommon, options) => { }) it('should get a dag-pb node local value', function (done) { - // TODO vmx 2018-02-22: Currently not supported in go-ipfs, it might - // be possible once https://github.com/ipfs/go-ipfs/issues/4728 is - // done - if (withGo) { - this.skip() - } ipfs.dag.get(cidPb, 'Data', (err, result) => { expect(err).to.not.exist() expect(result.value).to.eql(Buffer.from('I am inside a Protobuf')) @@ -193,12 +185,6 @@ module.exports = (createCommon, options) => { it.skip('should get dag-cbor value via dag-pb node', (done) => {}) it('should get dag-pb value via dag-cbor node', function (done) { - // TODO vmx 2018-02-22: Currently not supported in go-ipfs, it might - // be possible once https://github.com/ipfs/go-ipfs/issues/4728 is - // done - if (withGo) { - this.skip() - } ipfs.dag.get(cidCbor, 'pb/Data', (err, result) => { expect(err).to.not.exist() expect(result.value).to.eql(Buffer.from('I am inside a Protobuf')) @@ -223,12 +209,6 @@ module.exports = (createCommon, options) => { }) it('should get by CID string + path', function (done) { - // TODO vmx 2018-02-22: Currently not supported in go-ipfs, it might - // be possible once https://github.com/ipfs/go-ipfs/issues/4728 is - // done - if (withGo) { - this.skip() - } const cidCborStr = cidCbor.toBaseEncodedString() ipfs.dag.get(cidCborStr + '/pb/Data', (err, result) => { diff --git a/js/src/dag/put.js b/js/src/dag/put.js index 65339d81b..9e48ea183 100644 --- a/js/src/dag/put.js +++ b/js/src/dag/put.js @@ -21,7 +21,6 @@ module.exports = (createCommon, options) => { describe('.dag.put', () => { let ipfs - let withGo before(function (done) { // CI takes longer to instantiate the daemon, so we need to increase the @@ -34,7 +33,6 @@ module.exports = (createCommon, options) => { spawnNodeWithId(factory, (err, 
node) => { expect(err).to.not.exist() ipfs = node - withGo = node.peerId.agentVersion.startsWith('go-ipfs') done() }) }) @@ -96,12 +94,6 @@ module.exports = (createCommon, options) => { }) it('should not put dag-cbor node with wrong multicodec', function (done) { - // This works in go-ipfs because dag-pb will serialize any object. If - // the object has neither a `data` nor `links` field it's serialized - // as an empty object - if (withGo) { - this.skip() - } ipfs.dag.put(cborNode, { format: 'dag-pb', hashAlg: 'sha3-512' From 4c7adbca9efe4acd2f9c48d21267b00ccbb5ef1b Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Tue, 5 Jun 2018 14:15:10 +0100 Subject: [PATCH 09/41] feat(dht): modularise DHT License: MIT Signed-off-by: Alan Shaw --- js/src/dht.js | 234 ---------------------------------------- js/src/dht/findpeer.js | 60 +++++++++++ js/src/dht/findprovs.js | 61 +++++++++++ js/src/dht/get.js | 70 ++++++++++++ js/src/dht/index.js | 13 +++ js/src/dht/provide.js | 101 +++++++++++++++++ js/src/dht/put.js | 33 ++++++ js/src/dht/query.js | 66 ++++++++++++ 8 files changed, 404 insertions(+), 234 deletions(-) delete mode 100644 js/src/dht.js create mode 100644 js/src/dht/findpeer.js create mode 100644 js/src/dht/findprovs.js create mode 100644 js/src/dht/get.js create mode 100644 js/src/dht/index.js create mode 100644 js/src/dht/provide.js create mode 100644 js/src/dht/put.js create mode 100644 js/src/dht/query.js diff --git a/js/src/dht.js b/js/src/dht.js deleted file mode 100644 index 6771cf7aa..000000000 --- a/js/src/dht.js +++ /dev/null @@ -1,234 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) -const waterfall = require('async/waterfall') -const parallel = require('async/parallel') -const CID = require('cids') -const { spawnNodesWithId } = require('./utils/spawn') - -module.exports = (common) => { - describe('.dht', function () { - this.timeout(80 * 
1000) - - let withGo - let nodeA - let nodeB - let nodeC - let nodeD - let nodeE - - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - - spawnNodesWithId(5, factory, (err, nodes) => { - expect(err).to.not.exist() - - nodeA = nodes[0] - nodeB = nodes[1] - nodeC = nodes[2] - nodeD = nodes[3] - nodeE = nodes[4] - - parallel([ - (cb) => nodeA.swarm.connect(nodeB.peerId.addresses[0], cb), - (cb) => nodeB.swarm.connect(nodeC.peerId.addresses[0], cb), - (cb) => nodeC.swarm.connect(nodeA.peerId.addresses[0], cb), - (cb) => nodeD.swarm.connect(nodeA.peerId.addresses[0], cb), - (cb) => nodeE.swarm.connect(nodeA.peerId.addresses[0], cb), - (cb) => nodeD.swarm.connect(nodeB.peerId.addresses[0], cb), - (cb) => nodeE.swarm.connect(nodeB.peerId.addresses[0], cb), - (cb) => nodeD.swarm.connect(nodeC.peerId.addresses[0], cb), - (cb) => nodeE.swarm.connect(nodeC.peerId.addresses[0], cb), - (cb) => nodeD.swarm.connect(nodeE.peerId.addresses[0], cb), - (cb) => nodeA.id((err, id) => { - expect(err).to.not.exist() - withGo = id.agentVersion.startsWith('go-ipfs') - cb() - }) - ], done) - }) - }) - }) - - after((done) => common.teardown(done)) - - describe('.get and .put', () => { - it('errors when getting a non-existent key from the DHT', (done) => { - nodeA.dht.get('non-existing', { timeout: '100ms' }, (err, value) => { - expect(err).to.be.an.instanceof(Error) - done() - }) - }) - - it('fetches value after it was put on another node', function (done) { - this.timeout(80 * 1000) - - if (withGo) { - // go-ipfs errors with Error: key was not found (type 6) - // https://github.com/ipfs/go-ipfs/issues/3862 - this.skip() - } - - // TODO - this test needs to keep tryingl instead of the setTimeout - waterfall([ - (cb) => nodeB.object.new('unixfs-dir', cb), - (dagNode, cb) => setTimeout(() => cb(null, dagNode), 20000), - 
(dagNode, cb) => { - const multihash = dagNode.toJSON().multihash - - nodeA.dht.get(multihash, cb) - }, - (result, cb) => { - expect(result).to.eql('') - cb() - } - ], done) - }) - }) - - describe('.findpeer', () => { - it('finds other peers', (done) => { - nodeA.dht.findpeer(nodeC.peerId.id, (err, peer) => { - expect(err).to.not.exist() - // TODO upgrade the answer, format is weird - expect(peer[0].Responses[0].ID).to.be.equal(nodeC.peerId.id) - done() - }) - }) - - it('fails to find other peer, if peer does not exist', function (done) { - if (withGo) { - // TODO checking what is exactly go-ipfs returning - // https://github.com/ipfs/go-ipfs/issues/3862#issuecomment-294168090 - this.skip() - } - - nodeA.dht.findpeer('Qmd7qZS4T7xXtsNFdRoK1trfMs5zU94EpokQ9WFtxdPxsZ', (err, peer) => { - expect(err).to.not.exist() - expect(peer).to.be.equal(null) - done() - }) - }) - }) - - describe('.provide', () => { - it('regular', (done) => { - nodeC.files.add(Buffer.from('test'), (err, res) => { - if (err) return done(err) - - nodeC.dht.provide(new CID(res[0].hash), (err) => { - expect(err).to.not.exist() - done() - }) - }) - }) - - it('should not provide if block not found locally', (done) => { - const cid = new CID('Qmd7qZS4T7xXtsNFdRoK1trfMs5zU94EpokQ9WFtxdPxsZ') - - nodeC.dht.provide(cid, (err) => { - expect(err).to.exist() - expect(err.message).to.include('not found locally') - done() - }) - }) - - it('allows multiple CIDs to be passed', (done) => { - nodeC.files.add([Buffer.from('t0'), Buffer.from('t1')], (err, res) => { - if (err) return done(err) - - nodeC.dht.provide([ - new CID(res[0].hash), - new CID(res[1].hash) - ], (err) => { - expect(err).to.not.exist() - done() - }) - }) - }) - - it('should provide a CIDv1', (done) => { - nodeC.files.add(Buffer.from('test'), { 'cid-version': 1 }, (err, res) => { - if (err) return done(err) - - const cid = new CID(res[0].hash) - - nodeC.dht.provide(cid, (err) => { - expect(err).to.not.exist() - done() - }) - }) - }) - - it('errors 
on non CID arg', (done) => { - nodeC.dht.provide({}, (err) => { - expect(err).to.exist() - done() - }) - }) - - it('errors on array containing non CID arg', (done) => { - nodeC.dht.provide([{}], (err) => { - expect(err).to.exist() - done() - }) - }) - - it.skip('recursive', () => {}) - }) - - describe('findprovs', () => { - it('provide from one node and find it through another node', function (done) { - if (withGo) { - // TODO go-ipfs endpoint doesn't conform with the others - // https://github.com/ipfs/go-ipfs/issues/5047 - this.skip() - } - - waterfall([ - (cb) => nodeE.object.new('unixfs-dir', cb), - (dagNode, cb) => { - const cidV0 = new CID(dagNode.toJSON().multihash) - nodeE.dht.provide(cidV0, (err) => cb(err, cidV0)) - }, - (cidV0, cb) => nodeC.dht.findprovs(cidV0, cb), - (provs, cb) => { - expect(provs.map((p) => p.toB58String())) - .to.eql([nodeE.peerId.id]) - cb() - } - ], done) - }) - }) - - describe('.query', () => { - it('returns the other node in the query', function (done) { - const timeout = 150 * 1000 - this.timeout(timeout) - - // This test is meh. DHT works best with >= 20 nodes. Therefore a - // failure might happen, but we don't want to report it as such. 
- // Hence skip the test before the timeout is reached - const timeoutId = setTimeout(function () { - this.skip() - }.bind(this), timeout - 1000) - - nodeA.dht.query(nodeC.peerId.id, (err, peers) => { - clearTimeout(timeoutId) - expect(err).to.not.exist() - expect(peers.map((p) => p.ID)).to.include(nodeC.peerId.id) - done() - }) - }) - }) - }) -} diff --git a/js/src/dht/findpeer.js b/js/src/dht/findpeer.js new file mode 100644 index 000000000..272e4ff24 --- /dev/null +++ b/js/src/dht/findpeer.js @@ -0,0 +1,60 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const { spawnNodesWithId } = require('../utils/spawn') +const { getDescribe, getIt } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.dht.findpeer', function () { + this.timeout(80 * 1000) + + let nodeA + let nodeB + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + + spawnNodesWithId(2, factory, (err, nodes) => { + expect(err).to.not.exist() + + nodeA = nodes[0] + nodeB = nodes[1] + + nodeB.swarm.connect(nodeA.peerId.addresses[0], done) + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should find other peers', (done) => { + nodeA.dht.findpeer(nodeB.peerId.id, (err, peer) => { + expect(err).to.not.exist() + // TODO upgrade the answer, format is weird + expect(peer[0].Responses[0].ID).to.be.equal(nodeB.peerId.id) + done() + }) + }) + + it('should fail to find other peer if peer does not exist', function (done) { + nodeA.dht.findpeer('Qmd7qZS4T7xXtsNFdRoK1trfMs5zU94EpokQ9WFtxdPxsZ', (err, peer) => { + expect(err).to.not.exist() + expect(peer).to.be.equal(null) + done() 
+ }) + }) + }) +} diff --git a/js/src/dht/findprovs.js b/js/src/dht/findprovs.js new file mode 100644 index 000000000..2c39eef8d --- /dev/null +++ b/js/src/dht/findprovs.js @@ -0,0 +1,61 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const waterfall = require('async/waterfall') +const CID = require('cids') +const { spawnNodesWithId } = require('../utils/spawn') +const { getDescribe, getIt } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.dht.findprovs', function () { + this.timeout(80 * 1000) + + let nodeA + let nodeB + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + + spawnNodesWithId(2, factory, (err, nodes) => { + expect(err).to.not.exist() + + nodeA = nodes[0] + nodeB = nodes[1] + + nodeB.swarm.connect(nodeA.peerId.addresses[0], done) + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should provide from one node and find it through another node', function (done) { + waterfall([ + (cb) => nodeB.object.new('unixfs-dir', cb), + (dagNode, cb) => { + const cidV0 = new CID(dagNode.toJSON().multihash) + nodeB.dht.provide(cidV0, (err) => cb(err, cidV0)) + }, + (cidV0, cb) => nodeA.dht.findprovs(cidV0, cb), + (provs, cb) => { + expect(provs.map((p) => p.toB58String())) + .to.eql([nodeB.peerId.id]) + cb() + } + ], done) + }) + }) +} diff --git a/js/src/dht/get.js b/js/src/dht/get.js new file mode 100644 index 000000000..52aad028c --- /dev/null +++ b/js/src/dht/get.js @@ -0,0 +1,70 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect 
+chai.use(dirtyChai) +const waterfall = require('async/waterfall') +const { spawnNodesWithId } = require('../utils/spawn') +const { getDescribe, getIt } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.dht.get', function () { + this.timeout(80 * 1000) + + let nodeA + let nodeB + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + + spawnNodesWithId(2, factory, (err, nodes) => { + expect(err).to.not.exist() + + nodeA = nodes[0] + nodeB = nodes[1] + + nodeA.swarm.connect(nodeB.peerId.addresses[0], done) + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should error when getting a non-existent key from the DHT', (done) => { + nodeA.dht.get('non-existing', { timeout: '100ms' }, (err, value) => { + expect(err).to.be.an.instanceof(Error) + done() + }) + }) + + it('should get a value after it was put on another node', function (done) { + this.timeout(80 * 1000) + + // TODO - this test needs to keep tryingl instead of the setTimeout + waterfall([ + (cb) => nodeB.object.new('unixfs-dir', cb), + (dagNode, cb) => setTimeout(() => cb(null, dagNode), 20000), + (dagNode, cb) => { + const multihash = dagNode.toJSON().multihash + + nodeA.dht.get(multihash, cb) + }, + (result, cb) => { + expect(result).to.eql('') + cb() + } + ], done) + }) + }) +} diff --git a/js/src/dht/index.js b/js/src/dht/index.js new file mode 100644 index 000000000..07f5f68d9 --- /dev/null +++ b/js/src/dht/index.js @@ -0,0 +1,13 @@ +'use strict' +const { createSuite } = require('../utils/suite') + +const tests = { + get: require('./get'), + put: require('./put'), + findpeer: require('./findpeer'), + provide: require('./provide'), + findprovs: require('./findprovs'), + query: 
require('./query') +} + +module.exports = createSuite(tests) diff --git a/js/src/dht/provide.js b/js/src/dht/provide.js new file mode 100644 index 000000000..158b68599 --- /dev/null +++ b/js/src/dht/provide.js @@ -0,0 +1,101 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const CID = require('cids') +const { getDescribe, getIt } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.dht.provide', function () { + this.timeout(80 * 1000) + + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should provide local CID', (done) => { + ipfs.files.add(Buffer.from('test'), (err, res) => { + if (err) return done(err) + + ipfs.dht.provide(new CID(res[0].hash), (err) => { + expect(err).to.not.exist() + done() + }) + }) + }) + + it('should not provide if block not found locally', (done) => { + const cid = new CID('Qmd7qZS4T7xXtsNFdRoK1trfMs5zU94EpokQ9WFtxdPxsZ') + + ipfs.dht.provide(cid, (err) => { + expect(err).to.exist() + expect(err.message).to.include('not found locally') + done() + }) + }) + + it('should allow multiple CIDs to be passed', (done) => { + ipfs.files.add([Buffer.from('t0'), Buffer.from('t1')], (err, res) => { + if (err) return done(err) + + ipfs.dht.provide([ + new CID(res[0].hash), + new CID(res[1].hash) + ], (err) => { + expect(err).to.not.exist() + done() + }) + }) + }) + + it('should provide a CIDv1', (done) => { + ipfs.files.add(Buffer.from('test'), { 'cid-version': 1 
}, (err, res) => { + if (err) return done(err) + + const cid = new CID(res[0].hash) + + ipfs.dht.provide(cid, (err) => { + expect(err).to.not.exist() + done() + }) + }) + }) + + it('should error on non CID arg', (done) => { + ipfs.dht.provide({}, (err) => { + expect(err).to.exist() + done() + }) + }) + + it('should error on array containing non CID arg', (done) => { + ipfs.dht.provide([{}], (err) => { + expect(err).to.exist() + done() + }) + }) + }) +} diff --git a/js/src/dht/put.js b/js/src/dht/put.js new file mode 100644 index 000000000..8337db19d --- /dev/null +++ b/js/src/dht/put.js @@ -0,0 +1,33 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const { getDescribe, getIt } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.dht.put', function () { + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + done() + }) + }) + + after((done) => common.teardown(done)) + + it.skip('should put a value on the DHT', (done) => { + // TODO: implement me + }) + }) +} diff --git a/js/src/dht/query.js b/js/src/dht/query.js new file mode 100644 index 000000000..581475f33 --- /dev/null +++ b/js/src/dht/query.js @@ -0,0 +1,66 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const { spawnNodesWithId } = require('../utils/spawn') +const { getDescribe, getIt } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + 
describe('.dht.query', function () { + this.timeout(80 * 1000) + + let nodeA + let nodeB + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + + spawnNodesWithId(2, factory, (err, nodes) => { + expect(err).to.not.exist() + + nodeA = nodes[0] + nodeB = nodes[1] + + nodeB.swarm.connect(nodeA.peerId.addresses[0], done) + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should return the other node in the query', function (done) { + const timeout = 150 * 1000 + this.timeout(timeout) + + let skipped = false + + // This test is meh. DHT works best with >= 20 nodes. Therefore a + // failure might happen, but we don't want to report it as such. + // Hence skip the test before the timeout is reached + const timeoutId = setTimeout(function () { + skipped = true + this.skip() + }.bind(this), timeout - 1000) + + nodeA.dht.query(nodeB.peerId.id, (err, peers) => { + if (skipped) return + clearTimeout(timeoutId) + expect(err).to.not.exist() + expect(peers.map((p) => p.ID)).to.include(nodeB.peerId.id) + done() + }) + }) + }) +} From 3a5afe51bb259978b7bcd1c084d261b97313f1a4 Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Wed, 6 Jun 2018 11:52:34 +0100 Subject: [PATCH 10/41] feat(files): modularised files License: MIT Signed-off-by: Alan Shaw --- js/src/files.js | 1170 --------------------------- js/src/files/add-pull-stream.js | 109 +++ js/src/files/add-readable-stream.js | 92 +++ js/src/files/add.js | 337 ++++++++ js/src/files/cat-pull-stream.js | 81 ++ js/src/files/cat-readable-stream.js | 79 ++ js/src/files/cat.js | 190 +++++ js/src/files/cp.js | 67 ++ js/src/files/flush.js | 60 ++ js/src/files/get-pull-stream.js | 69 ++ js/src/files/get-readable-stream.js | 66 ++ js/src/files/get.js | 260 ++++++ js/src/files/index.js | 25 + js/src/files/ls.js | 79 ++ js/src/files/mkdir.js | 60 ++ 
js/src/files/mv.js | 67 ++ js/src/files/read.js | 55 ++ js/src/files/rm.js | 67 ++ js/src/files/stat.js | 142 ++++ js/src/files/write.js | 53 ++ js/src/index.js | 22 +- js/src/ls/index.js | 10 + js/src/ls/ls-pull-stream.js | 135 ++++ js/src/ls/ls-readable-stream.js | 134 +++ js/src/ls/ls.js | 145 ++++ js/src/utils/mocha.js | 2 + 26 files changed, 2395 insertions(+), 1181 deletions(-) delete mode 100644 js/src/files.js create mode 100644 js/src/files/add-pull-stream.js create mode 100644 js/src/files/add-readable-stream.js create mode 100644 js/src/files/add.js create mode 100644 js/src/files/cat-pull-stream.js create mode 100644 js/src/files/cat-readable-stream.js create mode 100644 js/src/files/cat.js create mode 100644 js/src/files/cp.js create mode 100644 js/src/files/flush.js create mode 100644 js/src/files/get-pull-stream.js create mode 100644 js/src/files/get-readable-stream.js create mode 100644 js/src/files/get.js create mode 100644 js/src/files/index.js create mode 100644 js/src/files/ls.js create mode 100644 js/src/files/mkdir.js create mode 100644 js/src/files/mv.js create mode 100644 js/src/files/read.js create mode 100644 js/src/files/rm.js create mode 100644 js/src/files/stat.js create mode 100644 js/src/files/write.js create mode 100644 js/src/ls/index.js create mode 100644 js/src/ls/ls-pull-stream.js create mode 100644 js/src/ls/ls-readable-stream.js create mode 100644 js/src/ls/ls.js diff --git a/js/src/files.js b/js/src/files.js deleted file mode 100644 index ff8f3a961..000000000 --- a/js/src/files.js +++ /dev/null @@ -1,1170 +0,0 @@ -/* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - -'use strict' - -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) -const loadFixture = require('aegir/fixtures') -const bs58 = require('bs58') -const parallel = require('async/parallel') -const series = require('async/series') -const Readable = 
require('readable-stream').Readable -const pull = require('pull-stream') -const concat = require('concat-stream') -const through = require('through2') -const path = require('path') -const bl = require('bl') -const isNode = require('detect-node') -const CID = require('cids') -const expectTimeout = require('./utils/expect-timeout') - -module.exports = (common) => { - describe('.files', function () { - this.timeout(40 * 1000) - - let ipfs - let withGo - - function fixture (path) { - return loadFixture(path, 'interface-ipfs-core') - } - - const smallFile = { - cid: 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', - data: fixture('js/test/fixtures/testfile.txt') - } - - const bigFile = { - cid: 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq', - data: fixture('js/test/fixtures/15mb.random') - } - - const directory = { - cid: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP', - files: { - 'pp.txt': fixture('js/test/fixtures/test-folder/pp.txt'), - 'holmes.txt': fixture('js/test/fixtures/test-folder/holmes.txt'), - 'jungle.txt': fixture('js/test/fixtures/test-folder/jungle.txt'), - 'alice.txt': fixture('js/test/fixtures/test-folder/alice.txt'), - 'files/hello.txt': fixture('js/test/fixtures/test-folder/files/hello.txt'), - 'files/ipfs.txt': fixture('js/test/fixtures/test-folder/files/ipfs.txt') - } - } - - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - node.id((err, id) => { - expect(err).to.not.exist() - withGo = id.agentVersion.startsWith('go-ipfs') - done() - }) - }) - }) - }) - - after((done) => common.teardown(done)) - - describe('.add', () => { - it('a Buffer', (done) => { - ipfs.files.add(smallFile.data, (err, filesAdded) => { - expect(err).to.not.exist() - - expect(filesAdded).to.have.length(1) - const file = 
filesAdded[0] - expect(file.hash).to.equal(smallFile.cid) - expect(file.path).to.equal(smallFile.cid) - // file.size counts the overhead by IPLD nodes and unixfs protobuf - expect(file.size).greaterThan(smallFile.data.length) - done() - }) - }) - - it('a BIG buffer', (done) => { - ipfs.files.add(bigFile.data, (err, filesAdded) => { - expect(err).to.not.exist() - - expect(filesAdded).to.have.length(1) - const file = filesAdded[0] - expect(file.hash).to.equal(bigFile.cid) - expect(file.path).to.equal(bigFile.cid) - // file.size counts the overhead by IPLD nodes and unixfs protobuf - expect(file.size).greaterThan(bigFile.data.length) - done() - }) - }) - - it('a BIG buffer with progress enabled', (done) => { - let progCalled = false - let accumProgress = 0 - function handler (p) { - progCalled = true - accumProgress = p - } - - ipfs.files.add(bigFile.data, { progress: handler }, (err, filesAdded) => { - expect(err).to.not.exist() - - expect(filesAdded).to.have.length(1) - const file = filesAdded[0] - expect(file.hash).to.equal(bigFile.cid) - expect(file.path).to.equal(bigFile.cid) - - expect(progCalled).to.be.true() - expect(accumProgress).to.equal(bigFile.data.length) - done() - }) - }) - - it('a Buffer as tuple', (done) => { - const tuple = { path: 'testfile.txt', content: smallFile.data } - - ipfs.files.add([ - tuple - ], (err, filesAdded) => { - expect(err).to.not.exist() - - expect(filesAdded).to.have.length(1) - const file = filesAdded[0] - expect(file.hash).to.equal(smallFile.cid) - expect(file.path).to.equal('testfile.txt') - - done() - }) - }) - - it('add by path fails', (done) => { - const validPath = path.join(process.cwd() + '/package.json') - - ipfs.files.add(validPath, (err, res) => { - expect(err).to.exist() - done() - }) - }) - - it('adds from readable stream', (done) => { - const expectedCid = 'QmVv4Wz46JaZJeH5PMV4LGbRiiMKEmszPYY3g6fjGnVXBS' - - const rs = new Readable() - rs.push(Buffer.from('some data')) - rs.push(null) - - ipfs.files.add(rs, (err, 
filesAdded) => { - expect(err).to.not.exist() - - expect(filesAdded).to.be.length(1) - const file = filesAdded[0] - expect(file.path).to.equal(expectedCid) - expect(file.size).to.equal(17) - expect(file.hash).to.equal(expectedCid) - done() - }) - }) - - it('adds from array of objects with readable stream content', (done) => { - const expectedCid = 'QmVv4Wz46JaZJeH5PMV4LGbRiiMKEmszPYY3g6fjGnVXBS' - - const rs = new Readable() - rs.push(Buffer.from('some data')) - rs.push(null) - - const tuple = { path: 'data.txt', content: rs } - - ipfs.files.add([tuple], (err, filesAdded) => { - expect(err).to.not.exist() - - expect(filesAdded).to.be.length(1) - const file = filesAdded[0] - expect(file.path).to.equal('data.txt') - expect(file.size).to.equal(17) - expect(file.hash).to.equal(expectedCid) - done() - }) - }) - - it('adds from pull stream (callback)', (done) => { - const expectedCid = 'QmRf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm' - - ipfs.files.add(pull.values([Buffer.from('test')]), (err, res) => { - if (err) return done(err) - expect(res).to.have.length(1) - expect(res[0]).to.deep.equal({ path: expectedCid, hash: expectedCid, size: 12 }) - done() - }) - }) - - it('adds from pull stream (promise)', () => { - const expectedCid = 'QmRf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm' - - return ipfs.files.add(pull.values([Buffer.from('test')])) - .then((res) => { - expect(res).to.have.length(1) - expect(res[0]).to.deep.equal({ path: expectedCid, hash: expectedCid, size: 12 }) - }) - }) - - it('adds from array of objects with pull stream content', () => { - const expectedCid = 'QmRf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm' - - return ipfs.files.add([{ content: pull.values([Buffer.from('test')]) }]) - .then((res) => { - expect(res).to.have.length(1) - expect(res[0]).to.deep.equal({ path: expectedCid, hash: expectedCid, size: 12 }) - }) - }) - - it('add a nested directory as array of tupples', function (done) { - // TODO: https://github.com/ipfs/js-ipfs-api/issues/339 - if 
(!isNode) { this.skip() } - - const content = (name) => ({ - path: `test-folder/${name}`, - content: directory.files[name] - }) - - const emptyDir = (name) => ({ path: `test-folder/${name}` }) - - const dirs = [ - content('pp.txt'), - content('holmes.txt'), - content('jungle.txt'), - content('alice.txt'), - emptyDir('empty-folder'), - content('files/hello.txt'), - content('files/ipfs.txt'), - emptyDir('files/empty') - ] - - ipfs.files.add(dirs, (err, res) => { - expect(err).to.not.exist() - const root = res[res.length - 1] - - expect(root.path).to.equal('test-folder') - expect(root.hash).to.equal(directory.cid) - done() - }) - }) - - it('add a nested directory as array of tuppled with progress', function (done) { - // TODO: https://github.com/ipfs/js-ipfs-api/issues/339 - if (!isNode) { this.skip() } - - const content = (name) => ({ - path: `test-folder/${name}`, - content: directory.files[name] - }) - - const emptyDir = (name) => ({ path: `test-folder/${name}` }) - - const dirs = [ - content('pp.txt'), - content('holmes.txt'), - content('jungle.txt'), - content('alice.txt'), - emptyDir('empty-folder'), - content('files/hello.txt'), - content('files/ipfs.txt'), - emptyDir('files/empty') - ] - - const total = dirs.reduce((i, entry) => { - return i + (entry.content ? 
entry.content.length : 0) - }, 0) - - let progCalled = false - let accumProgress = 0 - const handler = (p) => { - progCalled = true - accumProgress += p - } - - ipfs.files.add(dirs, { progress: handler }, (err, filesAdded) => { - expect(err).to.not.exist() - const root = filesAdded[filesAdded.length - 1] - - expect(progCalled).to.be.true() - expect(accumProgress).to.be.at.least(total) - expect(root.path).to.equal('test-folder') - expect(root.hash).to.equal(directory.cid) - done() - }) - }) - - it('fails in invalid input', (done) => { - const nonValid = 'sfdasfasfs' - - ipfs.files.add(nonValid, (err, result) => { - expect(err).to.exist() - done() - }) - }) - - it('wrapWithDirectory', (done) => { - return ipfs.files.add({ path: 'testfile.txt', content: smallFile.data }, { wrapWithDirectory: true }, (err, filesAdded) => { - expect(err).to.not.exist() - expect(filesAdded).to.have.length(2) - const file = filesAdded[0] - const wrapped = filesAdded[1] - expect(file.hash).to.equal(smallFile.cid) - expect(file.path).to.equal('testfile.txt') - expect(wrapped.path).to.equal('') - done() - }) - }) - - it('Promise test', () => { - return ipfs.files.add(smallFile.data) - .then((filesAdded) => { - const file = filesAdded[0] - expect(file.hash).to.equal(smallFile.cid) - expect(file.path).to.equal(smallFile.cid) - }) - }) - - it('files.add with only-hash=true', () => { - this.slow(10 * 1000) - const content = String(Math.random() + Date.now()) - - return ipfs.files.add(Buffer.from(content), { onlyHash: true }) - .then(files => { - expect(files).to.have.length(1) - - // 'ipfs.object.get()' should timeout because content wasn't actually added - return expectTimeout(ipfs.object.get(files[0].hash), 4000) - }) - }) - }) - - describe('.addReadableStream', () => { - it('stream of valid files and dirs', function (done) { - // TODO: https://github.com/ipfs/js-ipfs-api/issues/339 - if (!isNode) { this.skip() } - - const content = (name) => ({ - path: `test-folder/${name}`, - content: 
directory.files[name] - }) - - const emptyDir = (name) => ({ path: `test-folder/${name}` }) - - const files = [ - content('pp.txt'), - content('holmes.txt'), - content('jungle.txt'), - content('alice.txt'), - emptyDir('empty-folder'), - content('files/hello.txt'), - content('files/ipfs.txt'), - emptyDir('files/empty') - ] - - const stream = ipfs.files.addReadableStream() - - stream.on('error', (err) => { - expect(err).to.not.exist() - }) - - stream.on('data', (file) => { - if (file.path === 'test-folder') { - expect(file.hash).to.equal(directory.cid) - done() - } - }) - - files.forEach((file) => stream.write(file)) - stream.end() - }) - }) - - describe('.addPullStream', () => { - it('stream of valid files and dirs', function (done) { - // TODO: https://github.com/ipfs/js-ipfs-api/issues/339 - if (!isNode) { this.skip() } - - const content = (name) => ({ - path: `test-folder/${name}`, - content: directory.files[name] - }) - - const emptyDir = (name) => ({ path: `test-folder/${name}` }) - - const files = [ - content('pp.txt'), - content('holmes.txt'), - content('jungle.txt'), - content('alice.txt'), - emptyDir('empty-folder'), - content('files/hello.txt'), - content('files/ipfs.txt'), - emptyDir('files/empty') - ] - - const stream = ipfs.files.addPullStream() - - pull( - pull.values(files), - stream, - pull.collect((err, filesAdded) => { - expect(err).to.not.exist() - - filesAdded.forEach((file) => { - if (file.path === 'test-folder') { - expect(file.hash).to.equal(directory.cid) - done() - } - }) - }) - ) - }) - - it('adds with object chunks and pull stream content', (done) => { - const expectedCid = 'QmRf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm' - - pull( - pull.values([{ content: pull.values([Buffer.from('test')]) }]), - ipfs.files.addPullStream(), - pull.collect((err, res) => { - if (err) return done(err) - expect(res).to.have.length(1) - expect(res[0]).to.deep.equal({ path: expectedCid, hash: expectedCid, size: 12 }) - done() - }) - ) - }) - }) - - 
describe('.cat', () => { - before((done) => { - parallel([ - (cb) => ipfs.files.add(smallFile.data, cb), - (cb) => ipfs.files.add(bigFile.data, cb) - ], done) - }) - - it('with a base58 string encoded multihash', (done) => { - ipfs.files.cat(smallFile.cid, (err, data) => { - expect(err).to.not.exist() - expect(data.toString()).to.contain('Plz add me!') - done() - }) - }) - - it('with a multihash', (done) => { - const cid = Buffer.from(bs58.decode(smallFile.cid)) - - ipfs.files.cat(cid, (err, data) => { - expect(err).to.not.exist() - expect(data.toString()).to.contain('Plz add me!') - done() - }) - }) - - it('with a cid object', (done) => { - const cid = new CID(smallFile.cid) - - ipfs.files.cat(cid, (err, data) => { - expect(err).to.not.exist() - expect(data.toString()).to.contain('Plz add me!') - done() - }) - }) - - it('streams a large file', (done) => { - ipfs.files.cat(bigFile.cid, (err, data) => { - expect(err).to.not.exist() - expect(data.length).to.equal(bigFile.data.length) - expect(data).to.eql(bigFile.data) - done() - }) - }) - - it('with ipfs path', (done) => { - const ipfsPath = '/ipfs/' + smallFile.cid - - ipfs.files.cat(ipfsPath, (err, data) => { - expect(err).to.not.exist() - expect(data.toString()).to.contain('Plz add me!') - done() - }) - }) - - it('with ipfs path, nested value', (done) => { - const file = { path: 'a/testfile.txt', content: smallFile.data } - - ipfs.files.add([file], (err, filesAdded) => { - expect(err).to.not.exist() - - filesAdded.forEach((file) => { - if (file.path === 'a') { - ipfs.files.cat(`/ipfs/${file.hash}/testfile.txt`, (err, data) => { - expect(err).to.not.exist() - expect(data.toString()).to.contain('Plz add me!') - done() - }) - } - }) - }) - }) - - it('Promise test', () => { - return ipfs.files.cat(smallFile.cid) - .then((data) => { - expect(data.toString()).to.contain('Plz add me!') - }) - }) - - it('errors on invalid key', () => { - const invalidCid = 'somethingNotMultihash' - - return ipfs.files.cat(invalidCid) - 
.catch((err) => { - expect(err).to.exist() - - const errString = err.toString() - if (errString === 'Error: invalid ipfs ref path') { - expect(err.toString()).to.contain('Error: invalid ipfs ref path') - } - - if (errString === 'Error: Invalid Key') { - expect(err.toString()).to.contain('Error: Invalid Key') - } - }) - }) - - it('errors on unknown path', () => { - return ipfs.files.cat(smallFile.cid + '/does-not-exist') - .catch((err) => { - expect(err).to.exist() - expect(err.message).to.oneOf([ - 'No such file', - 'no link named "does-not-exist" under Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP']) - }) - }) - - it('errors on dir path', () => { - const file = { path: 'dir/testfile.txt', content: smallFile.data } - - return ipfs.files.add([file]) - .then((filesAdded) => { - expect(filesAdded.length).to.equal(2) - const files = filesAdded.filter((file) => file.path === 'dir') - expect(files.length).to.equal(1) - const dir = files[0] - return ipfs.files.cat(dir.hash) - .catch((err) => { - expect(err).to.exist() - expect(err.message).to.contain('this dag node is a directory') - }) - }) - }) - - it('exports a chunk of a file', function (done) { - if (withGo) { this.skip() } - - const offset = 1 - const length = 3 - - ipfs.files.cat(smallFile.cid, { - offset, - length - }, (err, data) => { - expect(err).to.not.exist() - expect(data.toString()).to.equal('lz ') - done() - }) - }) - }) - - describe('.catReadableStream', () => { - before((done) => ipfs.files.add(bigFile.data, done)) - - it('returns a Readable Stream for a cid', (done) => { - const stream = ipfs.files.catReadableStream(bigFile.cid) - - stream.pipe(bl((err, data) => { - expect(err).to.not.exist() - expect(data).to.eql(bigFile.data) - done() - })) - }) - - it('exports a chunk of a file in a ReadableStream', function (done) { - if (withGo) { this.skip() } - - const offset = 1 - const length = 3 - - const stream = ipfs.files.catReadableStream(smallFile.cid, { - offset, - length - }) - - stream.pipe(bl((err, 
data) => { - expect(err).to.not.exist() - expect(data.toString()).to.equal('lz ') - done() - })) - }) - }) - - describe('.catPullStream', () => { - before((done) => ipfs.files.add(smallFile.data, done)) - - it('returns a Pull Stream for a cid', (done) => { - const stream = ipfs.files.catPullStream(smallFile.cid) - - pull( - stream, - pull.concat((err, data) => { - expect(err).to.not.exist() - expect(data.length).to.equal(smallFile.data.length) - expect(data).to.eql(smallFile.data.toString()) - done() - }) - ) - }) - - it('exports a chunk of a file in a PullStream', function (done) { - if (withGo) { this.skip() } - - const offset = 1 - const length = 3 - - const stream = ipfs.files.catPullStream(smallFile.cid, { - offset, - length - }) - - pull( - stream, - pull.concat((err, data) => { - expect(err).to.not.exist() - expect(data.toString()).to.equal('lz ') - done() - }) - ) - }) - }) - - describe('.get', () => { - before((done) => { - parallel([ - (cb) => ipfs.files.add(smallFile.data, cb), - (cb) => ipfs.files.add(bigFile.data, cb) - ], done) - }) - - it('with a base58 encoded multihash', (done) => { - ipfs.files.get(smallFile.cid, (err, files) => { - expect(err).to.not.exist() - - expect(files).to.be.length(1) - expect(files[0].path).to.eql(smallFile.cid) - expect(files[0].content.toString('utf8')).to.contain('Plz add me!') - done() - }) - }) - - it('with a multihash', (done) => { - const cidBuf = Buffer.from(bs58.decode(smallFile.cid)) - ipfs.files.get(cidBuf, (err, files) => { - expect(err).to.not.exist() - - expect(files).to.be.length(1) - expect(files[0].path).to.eql(smallFile.cid) - expect(files[0].content.toString('utf8')).to.contain('Plz add me!') - done() - }) - }) - - it('large file', (done) => { - ipfs.files.get(bigFile.cid, (err, files) => { - expect(err).to.not.exist() - - expect(files.length).to.equal(1) - expect(files[0].path).to.equal(bigFile.cid) - expect(files[0].content.length).to.eql(bigFile.data.length) - 
expect(files[0].content).to.eql(bigFile.data) - done() - }) - }) - - it('directory', function (done) { - // TODO: https://github.com/ipfs/js-ipfs-api/issues/339 - if (!isNode) { this.skip() } - - series([ - (cb) => { - const content = (name) => ({ - path: `test-folder/${name}`, - content: directory.files[name] - }) - - const emptyDir = (name) => ({ path: `test-folder/${name}` }) - - const dirs = [ - content('pp.txt'), - content('holmes.txt'), - content('jungle.txt'), - content('alice.txt'), - emptyDir('empty-folder'), - content('files/hello.txt'), - content('files/ipfs.txt'), - emptyDir('files/empty') - ] - - ipfs.files.add(dirs, (err, res) => { - expect(err).to.not.exist() - const root = res[res.length - 1] - - expect(root.path).to.equal('test-folder') - expect(root.hash).to.equal(directory.cid) - cb() - }) - }, - (cb) => { - ipfs.files.get(directory.cid, (err, files) => { - expect(err).to.not.exist() - - files = files.sort((a, b) => { - if (a.path > b.path) return 1 - if (a.path < b.path) return -1 - return 0 - }) - - // Check paths - const paths = files.map((file) => { return file.path }) - expect(paths).to.include.members([ - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/empty', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/hello.txt', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/ipfs.txt', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt' - ]) - - // Check contents - const contents = files.map((file) => { - return file.content - ? 
file.content.toString() - : null - }) - - expect(contents).to.include.members([ - directory.files['alice.txt'].toString(), - directory.files['files/hello.txt'].toString(), - directory.files['files/ipfs.txt'].toString(), - directory.files['holmes.txt'].toString(), - directory.files['jungle.txt'].toString(), - directory.files['pp.txt'].toString() - ]) - cb() - }) - } - ], done) - }) - - it('with ipfs path, as object and nested value', (done) => { - const file = { - path: 'a/testfile.txt', - content: smallFile.data - } - - ipfs.files.add(file, (err, filesAdded) => { - expect(err).to.not.exist() - - filesAdded.forEach((file) => { - if (file.path === 'a') { - ipfs.files.get(`/ipfs/${file.hash}/testfile.txt`, (err, files) => { - expect(err).to.not.exist() - expect(files).to.be.length(1) - expect(files[0].content.toString('utf8')).to.contain('Plz add me!') - done() - }) - } - }) - }) - }) - - it('with ipfs path, as array and nested value', (done) => { - const file = { - path: 'a/testfile.txt', - content: smallFile.data - } - - ipfs.files.add([file], (err, filesAdded) => { - expect(err).to.not.exist() - - filesAdded.forEach((file) => { - if (file.path === 'a') { - ipfs.files.get(`/ipfs/${file.hash}/testfile.txt`, (err, files) => { - expect(err).to.not.exist() - expect(files).to.be.length(1) - expect(files[0].content.toString('utf8')).to.contain('Plz add me!') - done() - }) - } - }) - }) - }) - - it('Promise test', () => { - return ipfs.files.get(smallFile.cid) - .then((files) => { - expect(files).to.be.length(1) - expect(files[0].path).to.equal(smallFile.cid) - expect(files[0].content.toString()).to.contain('Plz add me!') - }) - }) - - it('errors on invalid key', () => { - const invalidCid = 'somethingNotMultihash' - - return ipfs.files.get(invalidCid) - .catch((err) => { - expect(err).to.exist() - const errString = err.toString() - if (errString === 'Error: invalid ipfs ref path') { - expect(err.toString()).to.contain('Error: invalid ipfs ref path') - } - if (errString 
=== 'Error: Invalid Key') { - expect(err.toString()).to.contain('Error: Invalid Key') - } - }) - }) - }) - - describe('.getReadableStream', () => { - before((done) => ipfs.files.add(smallFile.data, done)) - - it('returns a Readable Stream of Readable Streams', (done) => { - const stream = ipfs.files.getReadableStream(smallFile.cid) - - let files = [] - stream.pipe(through.obj((file, enc, next) => { - file.content.pipe(concat((content) => { - files.push({ path: file.path, content: content }) - next() - })) - }, () => { - expect(files).to.be.length(1) - expect(files[0].path).to.eql(smallFile.cid) - expect(files[0].content.toString()).to.contain('Plz add me!') - done() - })) - }) - }) - - describe('.getPullStream', () => { - before((done) => ipfs.files.add(smallFile.data, done)) - - it('returns a Pull Stream of Pull Streams', (done) => { - const stream = ipfs.files.getPullStream(smallFile.cid) - - pull( - stream, - pull.collect((err, files) => { - expect(err).to.not.exist() - expect(files).to.be.length(1) - expect(files[0].path).to.eql(smallFile.cid) - pull( - files[0].content, - pull.concat((err, data) => { - expect(err).to.not.exist() - expect(data.toString()).to.contain('Plz add me!') - done() - }) - ) - }) - ) - }) - }) - - describe('.ls', () => { - before(function (done) { - // TODO: https://github.com/ipfs/js-ipfs-api/issues/339 - if (!isNode) { this.skip() } - - const content = (name) => ({ - path: `test-folder/${name}`, - content: directory.files[name] - }) - - const emptyDir = (name) => ({ path: `test-folder/${name}` }) - - const dirs = [ - content('pp.txt'), - content('holmes.txt'), - content('jungle.txt'), - content('alice.txt'), - emptyDir('empty-folder'), - content('files/hello.txt'), - content('files/ipfs.txt'), - emptyDir('files/empty') - ] - - ipfs.files.add(dirs, (err, res) => { - expect(err).to.not.exist() - const root = res[res.length - 1] - - expect(root.path).to.equal('test-folder') - expect(root.hash).to.equal(directory.cid) - done() - }) - }) - 
- it('with a base58 encoded CID', function (done) { - // TODO: https://github.com/ipfs/js-ipfs-api/issues/339 - if (!isNode) { this.skip() } - - const cid = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' - ipfs.ls(cid, (err, files) => { - expect(err).to.not.exist() - - expect(files).to.eql([ - { depth: 1, - name: 'alice.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', - size: 11696, - hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi', - type: 'file' }, - { depth: 1, - name: 'empty-folder', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder', - size: 4, - hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', - type: 'dir' }, - { depth: 1, - name: 'files', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files', - size: 183, - hash: 'QmZ25UfTqXGz9RsEJFg7HUAuBcmfx5dQZDXQd2QEZ8Kj74', - type: 'dir' }, - { depth: 1, - name: 'holmes.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt', - size: 582072, - hash: 'QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr', - type: 'file' }, - { depth: 1, - name: 'jungle.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt', - size: 2305, - hash: 'QmT6orWioMiSqXXPGsUi71CKRRUmJ8YkuueV2DPV34E9y9', - type: 'file' }, - { depth: 1, - name: 'pp.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt', - size: 4551, - hash: 'QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn', - type: 'file' } - ]) - done() - }) - }) - - it('should correctly handle a non existing hash', (done) => { - ipfs.ls('surelynotavalidhashheh?', (err, res) => { - expect(err).to.exist() - expect(res).to.not.exist() - done() - }) - }) - - it('should correctly handle a non exiting path', (done) => { - ipfs.ls('QmRNjDeKStKGTQXnJ2NFqeQ9oW/folder_that_isnt_there', (err, res) => { - expect(err).to.exist() - expect(res).to.not.exist() - done() - }) - }) - }) - - describe('.lsReadableStream', () => { - before(function (done) { - // TODO: 
https://github.com/ipfs/js-ipfs-api/issues/339 - if (!isNode) { this.skip() } - - const content = (name) => ({ - path: `test-folder/${name}`, - content: directory.files[name] - }) - - const emptyDir = (name) => ({ path: `test-folder/${name}` }) - - const dirs = [ - content('pp.txt'), - content('holmes.txt'), - content('jungle.txt'), - content('alice.txt'), - emptyDir('empty-folder'), - content('files/hello.txt'), - content('files/ipfs.txt'), - emptyDir('files/empty') - ] - - ipfs.files.add(dirs, (err, res) => { - expect(err).to.not.exist() - const root = res[res.length - 1] - - expect(root.path).to.equal('test-folder') - expect(root.hash).to.equal(directory.cid) - done() - }) - }) - - it('with a base58 encoded CID', function (done) { - // TODO: https://github.com/ipfs/js-ipfs-api/issues/339 - if (!isNode) { this.skip() } - - const cid = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' - const stream = ipfs.lsReadableStream(cid) - - stream.pipe(concat((files) => { - expect(files).to.eql([ - { depth: 1, - name: 'alice.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', - size: 11696, - hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi', - type: 'file' }, - { depth: 1, - name: 'empty-folder', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder', - size: 4, - hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', - type: 'dir' }, - { depth: 1, - name: 'files', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files', - size: 183, - hash: 'QmZ25UfTqXGz9RsEJFg7HUAuBcmfx5dQZDXQd2QEZ8Kj74', - type: 'dir' }, - { depth: 1, - name: 'holmes.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt', - size: 582072, - hash: 'QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr', - type: 'file' }, - { depth: 1, - name: 'jungle.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt', - size: 2305, - hash: 'QmT6orWioMiSqXXPGsUi71CKRRUmJ8YkuueV2DPV34E9y9', - type: 'file' }, - { depth: 1, - name: 'pp.txt', 
- path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt', - size: 4551, - hash: 'QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn', - type: 'file' } - ]) - done() - })) - }) - }) - - describe('.lsPullStream', () => { - before(function (done) { - // TODO: https://github.com/ipfs/js-ipfs-api/issues/339 - if (!isNode) { this.skip() } - - const content = (name) => ({ - path: `test-folder/${name}`, - content: directory.files[name] - }) - - const emptyDir = (name) => ({ path: `test-folder/${name}` }) - - const dirs = [ - content('pp.txt'), - content('holmes.txt'), - content('jungle.txt'), - content('alice.txt'), - emptyDir('empty-folder'), - content('files/hello.txt'), - content('files/ipfs.txt'), - emptyDir('files/empty') - ] - - ipfs.files.add(dirs, (err, res) => { - expect(err).to.not.exist() - const root = res[res.length - 1] - - expect(root.path).to.equal('test-folder') - expect(root.hash).to.equal(directory.cid) - done() - }) - }) - - it('with a base58 encoded CID', function (done) { - // TODO: https://github.com/ipfs/js-ipfs-api/issues/339 - if (!isNode) { this.skip() } - - const cid = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' - const stream = ipfs.lsPullStream(cid) - - pull( - stream, - pull.collect((err, files) => { - expect(err).to.not.exist() - - expect(files).to.eql([ - { depth: 1, - name: 'alice.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', - size: 11696, - hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi', - type: 'file' }, - { depth: 1, - name: 'empty-folder', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder', - size: 4, - hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', - type: 'dir' }, - { depth: 1, - name: 'files', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files', - size: 183, - hash: 'QmZ25UfTqXGz9RsEJFg7HUAuBcmfx5dQZDXQd2QEZ8Kj74', - type: 'dir' }, - { depth: 1, - name: 'holmes.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt', - size: 582072, 
- hash: 'QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr', - type: 'file' }, - { depth: 1, - name: 'jungle.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt', - size: 2305, - hash: 'QmT6orWioMiSqXXPGsUi71CKRRUmJ8YkuueV2DPV34E9y9', - type: 'file' }, - { depth: 1, - name: 'pp.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt', - size: 4551, - hash: 'QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn', - type: 'file' } - ]) - done() - }) - ) - }) - }) - }) -} diff --git a/js/src/files/add-pull-stream.js b/js/src/files/add-pull-stream.js new file mode 100644 index 000000000..203030cce --- /dev/null +++ b/js/src/files/add-pull-stream.js @@ -0,0 +1,109 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const loadFixture = require('aegir/fixtures') +const pull = require('pull-stream') +const isNode = require('detect-node') +const { getDescribe, getIt } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.files.addPullStream', function () { + this.timeout(40 * 1000) + + let ipfs + + const directory = { + cid: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP', + files: { + 'pp.txt': loadFixture('js/test/fixtures/test-folder/pp.txt', 'interface-ipfs-core'), + 'holmes.txt': loadFixture('js/test/fixtures/test-folder/holmes.txt', 'interface-ipfs-core'), + 'jungle.txt': loadFixture('js/test/fixtures/test-folder/jungle.txt', 'interface-ipfs-core'), + 'alice.txt': loadFixture('js/test/fixtures/test-folder/alice.txt', 'interface-ipfs-core'), + 'files/hello.txt': loadFixture('js/test/fixtures/test-folder/files/hello.txt', 'interface-ipfs-core'), + 'files/ipfs.txt': loadFixture('js/test/fixtures/test-folder/files/ipfs.txt', 'interface-ipfs-core') + } 
+ } + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should add stream of valid files and dirs', function (done) { + // TODO: https://github.com/ipfs/js-ipfs-api/issues/339 + if (!isNode) { this.skip() } + + const content = (name) => ({ + path: `test-folder/${name}`, + content: directory.files[name] + }) + + const emptyDir = (name) => ({ path: `test-folder/${name}` }) + + const files = [ + content('pp.txt'), + content('holmes.txt'), + content('jungle.txt'), + content('alice.txt'), + emptyDir('empty-folder'), + content('files/hello.txt'), + content('files/ipfs.txt'), + emptyDir('files/empty') + ] + + const stream = ipfs.files.addPullStream() + + pull( + pull.values(files), + stream, + pull.collect((err, filesAdded) => { + expect(err).to.not.exist() + + filesAdded.forEach((file) => { + if (file.path === 'test-folder') { + expect(file.hash).to.equal(directory.cid) + done() + } + }) + }) + ) + }) + + it('should add with object chunks and pull stream content', (done) => { + const expectedCid = 'QmRf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm' + + pull( + pull.values([{ content: pull.values([Buffer.from('test')]) }]), + ipfs.files.addPullStream(), + pull.collect((err, res) => { + if (err) return done(err) + expect(res).to.have.length(1) + expect(res[0]).to.deep.equal({ path: expectedCid, hash: expectedCid, size: 12 }) + done() + }) + ) + }) + }) +} diff --git a/js/src/files/add-readable-stream.js b/js/src/files/add-readable-stream.js new file mode 100644 index 000000000..7e96b8908 --- /dev/null +++ b/js/src/files/add-readable-stream.js @@ -0,0 +1,92 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + 
+const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const loadFixture = require('aegir/fixtures') +const isNode = require('detect-node') +const { getDescribe, getIt } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.files.addReadableStream', function () { + this.timeout(40 * 1000) + + let ipfs + + const directory = { + cid: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP', + files: { + 'pp.txt': loadFixture('js/test/fixtures/test-folder/pp.txt', 'interface-ipfs-core'), + 'holmes.txt': loadFixture('js/test/fixtures/test-folder/holmes.txt', 'interface-ipfs-core'), + 'jungle.txt': loadFixture('js/test/fixtures/test-folder/jungle.txt', 'interface-ipfs-core'), + 'alice.txt': loadFixture('js/test/fixtures/test-folder/alice.txt', 'interface-ipfs-core'), + 'files/hello.txt': loadFixture('js/test/fixtures/test-folder/files/hello.txt', 'interface-ipfs-core'), + 'files/ipfs.txt': loadFixture('js/test/fixtures/test-folder/files/ipfs.txt', 'interface-ipfs-core') + } + } + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should add stream of valid files and dirs', function (done) { + // TODO: https://github.com/ipfs/js-ipfs-api/issues/339 + if (!isNode) { this.skip() } + + const content = (name) => ({ + path: `test-folder/${name}`, + content: directory.files[name] + }) + + const emptyDir = (name) => ({ path: `test-folder/${name}` }) + + const files = [ + content('pp.txt'), + content('holmes.txt'), + content('jungle.txt'), + 
content('alice.txt'), + emptyDir('empty-folder'), + content('files/hello.txt'), + content('files/ipfs.txt'), + emptyDir('files/empty') + ] + + const stream = ipfs.files.addReadableStream() + + stream.on('error', (err) => { + expect(err).to.not.exist() + }) + + stream.on('data', (file) => { + if (file.path === 'test-folder') { + expect(file.hash).to.equal(directory.cid) + done() + } + }) + + files.forEach((file) => stream.write(file)) + stream.end() + }) + }) +} diff --git a/js/src/files/add.js b/js/src/files/add.js new file mode 100644 index 000000000..ee15179ad --- /dev/null +++ b/js/src/files/add.js @@ -0,0 +1,337 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const loadFixture = require('aegir/fixtures') +const Readable = require('readable-stream').Readable +const pull = require('pull-stream') +const path = require('path') +const isNode = require('detect-node') +const expectTimeout = require('../utils/expect-timeout') +const { getDescribe, getIt } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.files.add', function () { + this.timeout(40 * 1000) + + let ipfs + + const smallFile = { + cid: 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', + data: loadFixture('js/test/fixtures/testfile.txt', 'interface-ipfs-core') + } + + const bigFile = { + cid: 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq', + data: loadFixture('js/test/fixtures/15mb.random', 'interface-ipfs-core') + } + + const directory = { + cid: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP', + files: { + 'pp.txt': loadFixture('js/test/fixtures/test-folder/pp.txt', 'interface-ipfs-core'), + 'holmes.txt': loadFixture('js/test/fixtures/test-folder/holmes.txt', 'interface-ipfs-core'), + 'jungle.txt': 
loadFixture('js/test/fixtures/test-folder/jungle.txt', 'interface-ipfs-core'), + 'alice.txt': loadFixture('js/test/fixtures/test-folder/alice.txt', 'interface-ipfs-core'), + 'files/hello.txt': loadFixture('js/test/fixtures/test-folder/files/hello.txt', 'interface-ipfs-core'), + 'files/ipfs.txt': loadFixture('js/test/fixtures/test-folder/files/ipfs.txt', 'interface-ipfs-core') + } + } + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should add a Buffer', (done) => { + ipfs.files.add(smallFile.data, (err, filesAdded) => { + expect(err).to.not.exist() + + expect(filesAdded).to.have.length(1) + const file = filesAdded[0] + expect(file.hash).to.equal(smallFile.cid) + expect(file.path).to.equal(smallFile.cid) + // file.size counts the overhead by IPLD nodes and unixfs protobuf + expect(file.size).greaterThan(smallFile.data.length) + done() + }) + }) + + it('should add a Buffer (promised)', () => { + return ipfs.files.add(smallFile.data) + .then((filesAdded) => { + const file = filesAdded[0] + expect(file.hash).to.equal(smallFile.cid) + expect(file.path).to.equal(smallFile.cid) + }) + }) + + it('should add a BIG Buffer', (done) => { + ipfs.files.add(bigFile.data, (err, filesAdded) => { + expect(err).to.not.exist() + + expect(filesAdded).to.have.length(1) + const file = filesAdded[0] + expect(file.hash).to.equal(bigFile.cid) + expect(file.path).to.equal(bigFile.cid) + // file.size counts the overhead by IPLD nodes and unixfs protobuf + expect(file.size).greaterThan(bigFile.data.length) + done() + }) + }) + + it('should add a BIG Buffer with progress enabled', (done) => { + let progCalled = false + let accumProgress = 0 + function handler (p) { 
+ progCalled = true + accumProgress = p + } + + ipfs.files.add(bigFile.data, { progress: handler }, (err, filesAdded) => { + expect(err).to.not.exist() + + expect(filesAdded).to.have.length(1) + const file = filesAdded[0] + expect(file.hash).to.equal(bigFile.cid) + expect(file.path).to.equal(bigFile.cid) + + expect(progCalled).to.be.true() + expect(accumProgress).to.equal(bigFile.data.length) + done() + }) + }) + + it('should add a Buffer as tuple', (done) => { + const tuple = { path: 'testfile.txt', content: smallFile.data } + + ipfs.files.add([ + tuple + ], (err, filesAdded) => { + expect(err).to.not.exist() + + expect(filesAdded).to.have.length(1) + const file = filesAdded[0] + expect(file.hash).to.equal(smallFile.cid) + expect(file.path).to.equal('testfile.txt') + + done() + }) + }) + + it('should not be able to add by path', (done) => { + const validPath = path.join(process.cwd() + '/package.json') + + ipfs.files.add(validPath, (err, res) => { + expect(err).to.exist() + done() + }) + }) + + it('should add readable stream', (done) => { + const expectedCid = 'QmVv4Wz46JaZJeH5PMV4LGbRiiMKEmszPYY3g6fjGnVXBS' + + const rs = new Readable() + rs.push(Buffer.from('some data')) + rs.push(null) + + ipfs.files.add(rs, (err, filesAdded) => { + expect(err).to.not.exist() + + expect(filesAdded).to.be.length(1) + const file = filesAdded[0] + expect(file.path).to.equal(expectedCid) + expect(file.size).to.equal(17) + expect(file.hash).to.equal(expectedCid) + done() + }) + }) + + it('should add array of objects with readable stream content', (done) => { + const expectedCid = 'QmVv4Wz46JaZJeH5PMV4LGbRiiMKEmszPYY3g6fjGnVXBS' + + const rs = new Readable() + rs.push(Buffer.from('some data')) + rs.push(null) + + const tuple = { path: 'data.txt', content: rs } + + ipfs.files.add([tuple], (err, filesAdded) => { + expect(err).to.not.exist() + + expect(filesAdded).to.be.length(1) + const file = filesAdded[0] + expect(file.path).to.equal('data.txt') + expect(file.size).to.equal(17) + 
expect(file.hash).to.equal(expectedCid) + done() + }) + }) + + it('should add pull stream', (done) => { + const expectedCid = 'QmRf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm' + + ipfs.files.add(pull.values([Buffer.from('test')]), (err, res) => { + if (err) return done(err) + expect(res).to.have.length(1) + expect(res[0]).to.deep.equal({ path: expectedCid, hash: expectedCid, size: 12 }) + done() + }) + }) + + it('should add pull stream (promised)', () => { + const expectedCid = 'QmRf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm' + + return ipfs.files.add(pull.values([Buffer.from('test')])) + .then((res) => { + expect(res).to.have.length(1) + expect(res[0]).to.deep.equal({ path: expectedCid, hash: expectedCid, size: 12 }) + }) + }) + + it('should add array of objects with pull stream content (promised)', () => { + const expectedCid = 'QmRf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm' + + return ipfs.files.add([{ content: pull.values([Buffer.from('test')]) }]) + .then((res) => { + expect(res).to.have.length(1) + expect(res[0]).to.deep.equal({ path: expectedCid, hash: expectedCid, size: 12 }) + }) + }) + + it('should add a nested directory as array of tupples', function (done) { + // TODO: https://github.com/ipfs/js-ipfs-api/issues/339 + if (!isNode) { this.skip() } + + const content = (name) => ({ + path: `test-folder/${name}`, + content: directory.files[name] + }) + + const emptyDir = (name) => ({ path: `test-folder/${name}` }) + + const dirs = [ + content('pp.txt'), + content('holmes.txt'), + content('jungle.txt'), + content('alice.txt'), + emptyDir('empty-folder'), + content('files/hello.txt'), + content('files/ipfs.txt'), + emptyDir('files/empty') + ] + + ipfs.files.add(dirs, (err, res) => { + expect(err).to.not.exist() + const root = res[res.length - 1] + + expect(root.path).to.equal('test-folder') + expect(root.hash).to.equal(directory.cid) + done() + }) + }) + + it('should add a nested directory as array of tuppled with progress', function (done) { + // TODO: 
https://github.com/ipfs/js-ipfs-api/issues/339 + if (!isNode) { this.skip() } + + const content = (name) => ({ + path: `test-folder/${name}`, + content: directory.files[name] + }) + + const emptyDir = (name) => ({ path: `test-folder/${name}` }) + + const dirs = [ + content('pp.txt'), + content('holmes.txt'), + content('jungle.txt'), + content('alice.txt'), + emptyDir('empty-folder'), + content('files/hello.txt'), + content('files/ipfs.txt'), + emptyDir('files/empty') + ] + + const total = dirs.reduce((i, entry) => { + return i + (entry.content ? entry.content.length : 0) + }, 0) + + let progCalled = false + let accumProgress = 0 + const handler = (p) => { + progCalled = true + accumProgress += p + } + + ipfs.files.add(dirs, { progress: handler }, (err, filesAdded) => { + expect(err).to.not.exist() + const root = filesAdded[filesAdded.length - 1] + + expect(progCalled).to.be.true() + expect(accumProgress).to.be.at.least(total) + expect(root.path).to.equal('test-folder') + expect(root.hash).to.equal(directory.cid) + done() + }) + }) + + it('should fail when passed invalid input', (done) => { + const nonValid = 'sfdasfasfs' + + ipfs.files.add(nonValid, (err, result) => { + expect(err).to.exist() + done() + }) + }) + + it('should wrap content in a directory', (done) => { + const data = { path: 'testfile.txt', content: smallFile.data } + + ipfs.files.add(data, { wrapWithDirectory: true }, (err, filesAdded) => { + expect(err).to.not.exist() + expect(filesAdded).to.have.length(2) + const file = filesAdded[0] + const wrapped = filesAdded[1] + expect(file.hash).to.equal(smallFile.cid) + expect(file.path).to.equal('testfile.txt') + expect(wrapped.path).to.equal('') + done() + }) + }) + + it('should add with only-hash=true (promised)', function () { + this.slow(10 * 1000) + const content = String(Math.random() + Date.now()) + + return ipfs.files.add(Buffer.from(content), { onlyHash: true }) + .then(files => { + expect(files).to.have.length(1) + + // 'ipfs.object.get()' should 
timeout because content wasn't actually added + return expectTimeout(ipfs.object.get(files[0].hash), 4000) + }) + }) + }) +} diff --git a/js/src/files/cat-pull-stream.js b/js/src/files/cat-pull-stream.js new file mode 100644 index 000000000..4dde40f7e --- /dev/null +++ b/js/src/files/cat-pull-stream.js @@ -0,0 +1,81 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const loadFixture = require('aegir/fixtures') +const pull = require('pull-stream') +const { getDescribe, getIt } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.files.catPullStream', function () { + this.timeout(40 * 1000) + + let ipfs + + const smallFile = { + cid: 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', + data: loadFixture('js/test/fixtures/testfile.txt', 'interface-ipfs-core') + } + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + before((done) => ipfs.files.add(smallFile.data, done)) + + after((done) => common.teardown(done)) + + it('should return a Pull Stream for a CID', (done) => { + const stream = ipfs.files.catPullStream(smallFile.cid) + + pull( + stream, + pull.concat((err, data) => { + expect(err).to.not.exist() + expect(data.length).to.equal(smallFile.data.length) + expect(data).to.eql(smallFile.data.toString()) + done() + }) + ) + }) + + it('should export a chunk of a file in a Pull Stream', (done) => { + const offset = 1 + const length = 3 + + const stream = ipfs.files.catPullStream(smallFile.cid, 
{ + offset, + length + }) + + pull( + stream, + pull.concat((err, data) => { + expect(err).to.not.exist() + expect(data.toString()).to.equal('lz ') + done() + }) + ) + }) + }) +} diff --git a/js/src/files/cat-readable-stream.js b/js/src/files/cat-readable-stream.js new file mode 100644 index 000000000..5c81fb508 --- /dev/null +++ b/js/src/files/cat-readable-stream.js @@ -0,0 +1,79 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const loadFixture = require('aegir/fixtures') +const bl = require('bl') +const { getDescribe, getIt } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.files.catReadableStream', function () { + this.timeout(40 * 1000) + + let ipfs + + const smallFile = { + cid: 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', + data: loadFixture('js/test/fixtures/testfile.txt', 'interface-ipfs-core') + } + + const bigFile = { + cid: 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq', + data: loadFixture('js/test/fixtures/15mb.random', 'interface-ipfs-core') + } + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + before((done) => ipfs.files.add(bigFile.data, done)) + + after((done) => common.teardown(done)) + + it('should return a Readable Stream for a CID', (done) => { + const stream = ipfs.files.catReadableStream(bigFile.cid) + + stream.pipe(bl((err, data) => { + expect(err).to.not.exist() + expect(data).to.eql(bigFile.data) + done() + })) + }) + + it('should export a chunk 
of a file in a Readable Stream', (done) => { + const offset = 1 + const length = 3 + + const stream = ipfs.files.catReadableStream(smallFile.cid, { + offset, + length + }) + + stream.pipe(bl((err, data) => { + expect(err).to.not.exist() + expect(data.toString()).to.equal('lz ') + done() + })) + }) + }) +} diff --git a/js/src/files/cat.js b/js/src/files/cat.js new file mode 100644 index 000000000..ba13756b1 --- /dev/null +++ b/js/src/files/cat.js @@ -0,0 +1,190 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const loadFixture = require('aegir/fixtures') +const bs58 = require('bs58') +const parallel = require('async/parallel') +const CID = require('cids') +const { getDescribe, getIt } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.files.cat', function () { + this.timeout(40 * 1000) + + let ipfs + + const smallFile = { + cid: 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', + data: loadFixture('js/test/fixtures/testfile.txt', 'interface-ipfs-core') + } + + const bigFile = { + cid: 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq', + data: loadFixture('js/test/fixtures/15mb.random', 'interface-ipfs-core') + } + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + before((done) => { + parallel([ + (cb) => ipfs.files.add(smallFile.data, cb), + (cb) => ipfs.files.add(bigFile.data, cb) + ], done) + }) + + it('should cat with a base58 string encoded 
multihash', (done) => { + ipfs.files.cat(smallFile.cid, (err, data) => { + expect(err).to.not.exist() + expect(data.toString()).to.contain('Plz add me!') + done() + }) + }) + + it('should cat with a base58 string encoded multihash (promised)', () => { + return ipfs.files.cat(smallFile.cid) + .then((data) => { + expect(data.toString()).to.contain('Plz add me!') + }) + }) + + it('should cat with a Buffer multihash', (done) => { + const cid = Buffer.from(bs58.decode(smallFile.cid)) + + ipfs.files.cat(cid, (err, data) => { + expect(err).to.not.exist() + expect(data.toString()).to.contain('Plz add me!') + done() + }) + }) + + it('should cat with a CID object', (done) => { + const cid = new CID(smallFile.cid) + + ipfs.files.cat(cid, (err, data) => { + expect(err).to.not.exist() + expect(data.toString()).to.contain('Plz add me!') + done() + }) + }) + + it('should cat a BIG file', (done) => { + ipfs.files.cat(bigFile.cid, (err, data) => { + expect(err).to.not.exist() + expect(data.length).to.equal(bigFile.data.length) + expect(data).to.eql(bigFile.data) + done() + }) + }) + + it('should cat with IPFS path', (done) => { + const ipfsPath = '/ipfs/' + smallFile.cid + + ipfs.files.cat(ipfsPath, (err, data) => { + expect(err).to.not.exist() + expect(data.toString()).to.contain('Plz add me!') + done() + }) + }) + + it('should cat with IPFS path, nested value', (done) => { + const file = { path: 'a/testfile.txt', content: smallFile.data } + + ipfs.files.add([file], (err, filesAdded) => { + expect(err).to.not.exist() + + const file = filesAdded.find((f) => f.path === 'a') + expect(file).to.exist() + + ipfs.files.cat(`/ipfs/${file.hash}/testfile.txt`, (err, data) => { + expect(err).to.not.exist() + expect(data.toString()).to.contain('Plz add me!') + done() + }) + }) + }) + + it('should error on invalid key (promised)', () => { + const invalidCid = 'somethingNotMultihash' + + return ipfs.files.cat(invalidCid) + .catch((err) => { + expect(err).to.exist() + + const errString = 
err.toString() + if (errString === 'Error: invalid ipfs ref path') { + expect(err.toString()).to.contain('Error: invalid ipfs ref path') + } + + if (errString === 'Error: Invalid Key') { + expect(err.toString()).to.contain('Error: Invalid Key') + } + }) + }) + + it('should error on unknown path (promised)', () => { + return ipfs.files.cat(smallFile.cid + '/does-not-exist') + .catch((err) => { + expect(err).to.exist() + expect(err.message).to.oneOf([ + 'No such file', + 'no link named "does-not-exist" under Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP']) + }) + }) + + it('should error on dir path (promised)', () => { + const file = { path: 'dir/testfile.txt', content: smallFile.data } + + return ipfs.files.add([file]) + .then((filesAdded) => { + expect(filesAdded.length).to.equal(2) + const files = filesAdded.filter((file) => file.path === 'dir') + expect(files.length).to.equal(1) + const dir = files[0] + return ipfs.files.cat(dir.hash) + .catch((err) => { + expect(err).to.exist() + expect(err.message).to.contain('this dag node is a directory') + }) + }) + }) + + it('should export a chunk of a file', (done) => { + const offset = 1 + const length = 3 + + ipfs.files.cat(smallFile.cid, { + offset, + length + }, (err, data) => { + expect(err).to.not.exist() + expect(data.toString()).to.equal('lz ') + done() + }) + }) + }) +} diff --git a/js/src/files/cp.js b/js/src/files/cp.js new file mode 100644 index 000000000..5f8c03e2c --- /dev/null +++ b/js/src/files/cp.js @@ -0,0 +1,67 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const { getDescribe, getIt } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.files.cp', function () { + this.timeout(40 * 1000) + + let ipfs + 
+ before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should copy file, expect error', (done) => { + ipfs.files.cp(['/test/c', '/test/b'], (err) => { + expect(err).to.exist() + done() + }) + }) + + it('should copy file, expect no error', (done) => { + ipfs.files.cp(['/test/a', '/test/b'], (err) => { + expect(err).to.not.exist() + done() + }) + }) + + it('should copy dir, expect error', (done) => { + ipfs.files.cp(['/test/lv1/lv3', '/test/lv1/lv4'], (err) => { + expect(err).to.exist() + done() + }) + }) + + it('should copy dir, expect no error', (done) => { + ipfs.files.cp(['/test/lv1/lv2', '/test/lv1/lv3'], (err) => { + expect(err).to.not.exist() + done() + }) + }) + }) +} diff --git a/js/src/files/flush.js b/js/src/files/flush.js new file mode 100644 index 000000000..3bfdbb34e --- /dev/null +++ b/js/src/files/flush.js @@ -0,0 +1,60 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const { getDescribe, getIt } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.files.flush', function () { + this.timeout(40 * 1000) + + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) 
+ }) + }) + + after((done) => common.teardown(done)) + + it('should not flush not found, expect error', (done) => { + ipfs.files.flush('/test/404', (err) => { + expect(err).to.exist() + done() + }) + }) + + it('should flush root', (done) => { + ipfs.files.flush((err) => { + expect(err).to.not.exist() + done() + }) + }) + + it('should flush specific dir', (done) => { + ipfs.files.flush('/test', (err) => { + expect(err).to.not.exist() + done() + }) + }) + }) +} diff --git a/js/src/files/get-pull-stream.js b/js/src/files/get-pull-stream.js new file mode 100644 index 000000000..1ff79389d --- /dev/null +++ b/js/src/files/get-pull-stream.js @@ -0,0 +1,69 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const loadFixture = require('aegir/fixtures') +const pull = require('pull-stream') +const { getDescribe, getIt } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.files.getPullStream', function () { + this.timeout(40 * 1000) + + let ipfs + + const smallFile = { + cid: 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', + data: loadFixture('js/test/fixtures/testfile.txt', 'interface-ipfs-core') + } + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + before((done) => ipfs.files.add(smallFile.data, done)) + + after((done) => common.teardown(done)) + + it('should return a Pull Stream of Pull Streams', (done) => { + const stream = ipfs.files.getPullStream(smallFile.cid) + + pull( + stream, + 
pull.collect((err, files) => { + expect(err).to.not.exist() + expect(files).to.be.length(1) + expect(files[0].path).to.eql(smallFile.cid) + pull( + files[0].content, + pull.concat((err, data) => { + expect(err).to.not.exist() + expect(data.toString()).to.contain('Plz add me!') + done() + }) + ) + }) + ) + }) + }) +} diff --git a/js/src/files/get-readable-stream.js b/js/src/files/get-readable-stream.js new file mode 100644 index 000000000..d7517f24b --- /dev/null +++ b/js/src/files/get-readable-stream.js @@ -0,0 +1,66 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const loadFixture = require('aegir/fixtures') +const concat = require('concat-stream') +const through = require('through2') +const { getDescribe, getIt } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.files.getReadableStream', function () { + this.timeout(40 * 1000) + + let ipfs + + const smallFile = { + cid: 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', + data: loadFixture('js/test/fixtures/testfile.txt', 'interface-ipfs-core') + } + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + before((done) => ipfs.files.add(smallFile.data, done)) + + after((done) => common.teardown(done)) + + it('should return a Readable Stream of Readable Streams', (done) => { + const stream = ipfs.files.getReadableStream(smallFile.cid) + + let files = [] + stream.pipe(through.obj((file, enc, next) => { + file.content.pipe(concat((content) 
=> { + files.push({ path: file.path, content: content }) + next() + })) + }, () => { + expect(files).to.be.length(1) + expect(files[0].path).to.eql(smallFile.cid) + expect(files[0].content.toString()).to.contain('Plz add me!') + done() + })) + }) + }) +} diff --git a/js/src/files/get.js b/js/src/files/get.js new file mode 100644 index 000000000..2f490a683 --- /dev/null +++ b/js/src/files/get.js @@ -0,0 +1,260 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const loadFixture = require('aegir/fixtures') +const bs58 = require('bs58') +const parallel = require('async/parallel') +const series = require('async/series') +const isNode = require('detect-node') +const { getDescribe, getIt } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.files.get', function () { + this.timeout(40 * 1000) + + let ipfs + + function fixture (path) { + return loadFixture(path, 'interface-ipfs-core') + } + + const smallFile = { + cid: 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', + data: fixture('js/test/fixtures/testfile.txt') + } + + const bigFile = { + cid: 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq', + data: fixture('js/test/fixtures/15mb.random') + } + + const directory = { + cid: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP', + files: { + 'pp.txt': fixture('js/test/fixtures/test-folder/pp.txt'), + 'holmes.txt': fixture('js/test/fixtures/test-folder/holmes.txt'), + 'jungle.txt': fixture('js/test/fixtures/test-folder/jungle.txt'), + 'alice.txt': fixture('js/test/fixtures/test-folder/alice.txt'), + 'files/hello.txt': fixture('js/test/fixtures/test-folder/files/hello.txt'), + 'files/ipfs.txt': fixture('js/test/fixtures/test-folder/files/ipfs.txt') + } + } + + before(function 
(done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + before((done) => { + parallel([ + (cb) => ipfs.files.add(smallFile.data, cb), + (cb) => ipfs.files.add(bigFile.data, cb) + ], done) + }) + + after((done) => common.teardown(done)) + + it('should get with a base58 encoded multihash', (done) => { + ipfs.files.get(smallFile.cid, (err, files) => { + expect(err).to.not.exist() + + expect(files).to.be.length(1) + expect(files[0].path).to.eql(smallFile.cid) + expect(files[0].content.toString('utf8')).to.contain('Plz add me!') + done() + }) + }) + + it('should get with a base58 encoded multihash (promised)', () => { + return ipfs.files.get(smallFile.cid) + .then((files) => { + expect(files).to.be.length(1) + expect(files[0].path).to.equal(smallFile.cid) + expect(files[0].content.toString()).to.contain('Plz add me!') + }) + }) + + it('should get with a Buffer multihash', (done) => { + const cidBuf = Buffer.from(bs58.decode(smallFile.cid)) + ipfs.files.get(cidBuf, (err, files) => { + expect(err).to.not.exist() + + expect(files).to.be.length(1) + expect(files[0].path).to.eql(smallFile.cid) + expect(files[0].content.toString('utf8')).to.contain('Plz add me!') + done() + }) + }) + + it('should get a BIG file', (done) => { + ipfs.files.get(bigFile.cid, (err, files) => { + expect(err).to.not.exist() + + expect(files.length).to.equal(1) + expect(files[0].path).to.equal(bigFile.cid) + expect(files[0].content.length).to.eql(bigFile.data.length) + expect(files[0].content).to.eql(bigFile.data) + done() + }) + }) + + it('should get a directory', function (done) { + // TODO: https://github.com/ipfs/js-ipfs-api/issues/339 + if (!isNode) { this.skip() } + + series([ + (cb) => { + const content = (name) => ({ + path: 
`test-folder/${name}`, + content: directory.files[name] + }) + + const emptyDir = (name) => ({ path: `test-folder/${name}` }) + + const dirs = [ + content('pp.txt'), + content('holmes.txt'), + content('jungle.txt'), + content('alice.txt'), + emptyDir('empty-folder'), + content('files/hello.txt'), + content('files/ipfs.txt'), + emptyDir('files/empty') + ] + + ipfs.files.add(dirs, (err, res) => { + expect(err).to.not.exist() + const root = res[res.length - 1] + + expect(root.path).to.equal('test-folder') + expect(root.hash).to.equal(directory.cid) + cb() + }) + }, + (cb) => { + ipfs.files.get(directory.cid, (err, files) => { + expect(err).to.not.exist() + + files = files.sort((a, b) => { + if (a.path > b.path) return 1 + if (a.path < b.path) return -1 + return 0 + }) + + // Check paths + const paths = files.map((file) => { return file.path }) + expect(paths).to.include.members([ + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/empty', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/hello.txt', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/ipfs.txt', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt' + ]) + + // Check contents + const contents = files.map((file) => { + return file.content + ? 
file.content.toString() + : null + }) + + expect(contents).to.include.members([ + directory.files['alice.txt'].toString(), + directory.files['files/hello.txt'].toString(), + directory.files['files/ipfs.txt'].toString(), + directory.files['holmes.txt'].toString(), + directory.files['jungle.txt'].toString(), + directory.files['pp.txt'].toString() + ]) + cb() + }) + } + ], done) + }) + + it('should get with ipfs path, as object and nested value', (done) => { + const file = { + path: 'a/testfile.txt', + content: smallFile.data + } + + ipfs.files.add(file, (err, filesAdded) => { + expect(err).to.not.exist() + + filesAdded.forEach((file) => { + if (file.path === 'a') { + ipfs.files.get(`/ipfs/${file.hash}/testfile.txt`, (err, files) => { + expect(err).to.not.exist() + expect(files).to.be.length(1) + expect(files[0].content.toString('utf8')).to.contain('Plz add me!') + done() + }) + } + }) + }) + }) + + it('should get with ipfs path, as array and nested value', (done) => { + const file = { + path: 'a/testfile.txt', + content: smallFile.data + } + + ipfs.files.add([file], (err, filesAdded) => { + expect(err).to.not.exist() + + filesAdded.forEach((file) => { + if (file.path === 'a') { + ipfs.files.get(`/ipfs/${file.hash}/testfile.txt`, (err, files) => { + expect(err).to.not.exist() + expect(files).to.be.length(1) + expect(files[0].content.toString('utf8')).to.contain('Plz add me!') + done() + }) + } + }) + }) + }) + + it('should error on invalid key', () => { + const invalidCid = 'somethingNotMultihash' + + return ipfs.files.get(invalidCid) + .catch((err) => { + expect(err).to.exist() + const errString = err.toString() + if (errString === 'Error: invalid ipfs ref path') { + expect(err.toString()).to.contain('Error: invalid ipfs ref path') + } + if (errString === 'Error: Invalid Key') { + expect(err.toString()).to.contain('Error: Invalid Key') + } + }) + }) + }) +} diff --git a/js/src/files/index.js b/js/src/files/index.js new file mode 100644 index 000000000..11829cc14 --- 
/dev/null +++ b/js/src/files/index.js @@ -0,0 +1,25 @@ +'use strict' +const { createSuite } = require('../utils/suite') + +const tests = { + add: require('./add'), + addReadableStream: require('./add-readable-stream'), + addPullStream: require('./add-pull-stream'), + cat: require('./cat'), + catReadableStream: require('./cat-readable-stream'), + catPullStream: require('./cat-pull-stream'), + get: require('./get'), + getReadableStream: require('./get-readable-stream'), + getPullStream: require('./get-pull-stream'), + mkdir: require('./mkdir'), + write: require('./write'), + cp: require('./cp'), + mv: require('./mv'), + rm: require('./rm'), + stat: require('./stat'), + read: require('./read'), + ls: require('./ls'), + flush: require('./flush') +} + +module.exports = createSuite(tests) diff --git a/js/src/files/ls.js b/js/src/files/ls.js new file mode 100644 index 000000000..312e68162 --- /dev/null +++ b/js/src/files/ls.js @@ -0,0 +1,79 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const { getDescribe, getIt } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.files.ls', function () { + this.timeout(40 * 1000) + + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should not ls not found, expect error', (done) => { + ipfs.files.ls('/test/404', (err, info) => { + expect(err).to.exist() + expect(info).to.not.exist() + done() + }) 
+ }) + + it('should ls directory', (done) => { + ipfs.files.ls('/test', (err, info) => { + expect(err).to.not.exist() + expect(info).to.eql([ + { name: 'b', type: 0, size: 0, hash: '' }, + { name: 'lv1', type: 0, size: 0, hash: '' } + ]) + done() + }) + }) + + it('should ls -l directory', (done) => { + ipfs.files.ls('/test', { l: true }, (err, info) => { + expect(err).to.not.exist() + expect(info).to.eql([ + { + name: 'b', + type: 0, + size: 13, + hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T' + }, + { + name: 'lv1', + type: 1, + size: 0, + hash: 'QmaSPtNHYKPjNjQnYX9pdu5ocpKUQEL3itSz8LuZcoW6J5' + } + ]) + done() + }) + }) + }) +} diff --git a/js/src/files/mkdir.js b/js/src/files/mkdir.js new file mode 100644 index 000000000..4a9609bd9 --- /dev/null +++ b/js/src/files/mkdir.js @@ -0,0 +1,60 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const { getDescribe, getIt } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.files.mkdir', function () { + this.timeout(40 * 1000) + + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should make directory on root', (done) => { + ipfs.files.mkdir('/test', (err) => { + expect(err).to.not.exist() + done() + }) + }) + + it('should make directory and its parents', (done) => { + ipfs.files.mkdir('/test/lv1/lv2', { p: true }, (err) => { + expect(err).to.not.exist() + done() + }) + }) + + 
it('should make already existent directory', (done) => { + ipfs.files.mkdir('/', (err) => { + expect(err).to.exist() + done() + }) + }) + }) +} diff --git a/js/src/files/mv.js b/js/src/files/mv.js new file mode 100644 index 000000000..4bcddda88 --- /dev/null +++ b/js/src/files/mv.js @@ -0,0 +1,67 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const { getDescribe, getIt } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.files.mv', function () { + this.timeout(40 * 1000) + + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should move file, expect error', (done) => { + ipfs.files.mv(['/test/404', '/test/a'], (err) => { + expect(err).to.exist() + done() + }) + }) + + it('should move file, expect no error', (done) => { + ipfs.files.mv(['/test/a', '/test/c'], (err) => { + expect(err).to.not.exist() + done() + }) + }) + + it('should move dir, expect error', (done) => { + ipfs.files.mv(['/test/lv1/404', '/test/lv1'], (err) => { + expect(err).to.exist() + done() + }) + }) + + it('should move dir, expect no error', (done) => { + ipfs.files.mv(['/test/lv1/lv2', '/test/lv1/lv4'], (err) => { + expect(err).to.not.exist() + done() + }) + }) + }) +} diff --git a/js/src/files/read.js b/js/src/files/read.js new file mode 100644 index 000000000..c5a3f7210 --- /dev/null +++ b/js/src/files/read.js @@ -0,0 +1,55 @@ +/* eslint-env mocha */ +/* 
eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const { getDescribe, getIt } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.files.read', function () { + this.timeout(40 * 1000) + + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should not read not found, expect error', (done) => { + ipfs.files.read('/test/404', (err, buf) => { + expect(err).to.exist() + expect(buf).to.not.exist() + done() + }) + }) + + it('should read file', (done) => { + ipfs.files.read('/test/b', (err, buf) => { + expect(err).to.not.exist() + expect(buf).to.eql(Buffer.from('Hello, world!')) + done() + }) + }) + }) +} diff --git a/js/src/files/rm.js b/js/src/files/rm.js new file mode 100644 index 000000000..8a2145c7c --- /dev/null +++ b/js/src/files/rm.js @@ -0,0 +1,67 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const { getDescribe, getIt } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.files.rm', function () { + this.timeout(40 * 1000) + + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the 
before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should remove file, expect error', (done) => { + ipfs.files.rm('/test/a', (err) => { + expect(err).to.exist() + done() + }) + }) + + it('should remove file, expect no error', (done) => { + ipfs.files.rm('/test/c', (err) => { + expect(err).to.not.exist() + done() + }) + }) + + it('should remove dir, expect error', (done) => { + ipfs.files.rm('/test/lv1/lv4', (err) => { + expect(err).to.exist() + done() + }) + }) + + it('should remove dir, expect no error', (done) => { + ipfs.files.rm('/test/lv1/lv4', {recursive: true}, (err) => { + expect(err).to.not.exist() + done() + }) + }) + }) +} diff --git a/js/src/files/stat.js b/js/src/files/stat.js new file mode 100644 index 000000000..b04993a83 --- /dev/null +++ b/js/src/files/stat.js @@ -0,0 +1,142 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const loadFixture = require('aegir/fixtures') +const expect = chai.expect +chai.use(dirtyChai) +const { getDescribe, getIt } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.files.stat', function () { + this.timeout(40 * 1000) + + let ipfs + + const smallFile = { + cid: 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', + data: loadFixture('js/test/fixtures/testfile.txt', 'interface-ipfs-core') + } + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + 
expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + before((done) => ipfs.files.add(smallFile.data, done)) + + after((done) => common.teardown(done)) + + it('should not stat not found, expect error', function (done) { + ipfs.files.stat('/test/404', (err) => { + expect(err).to.exist() + done() + }) + }) + + it('should stat file', function (done) { + ipfs.files.stat('/test/b', (err, stat) => { + expect(err).to.not.exist() + expect(stat).to.eql({ + type: 'file', + blocks: 1, + size: 13, + hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T', + cumulativeSize: 71, + withLocality: false, + local: undefined, + sizeLocal: undefined + }) + done() + }) + }) + + it('should stat dir', function (done) { + ipfs.files.stat('/test', (err, stat) => { + expect(err).to.not.exist() + expect(stat).to.eql({ + type: 'directory', + blocks: 2, + size: 0, + hash: 'QmVrkkNurBCeJvPRohW5JTvJG4AxGrFg7FnmsZZUS6nJto', + cumulativeSize: 216, + withLocality: false, + local: undefined, + sizeLocal: undefined + }) + done() + }) + }) + + // TODO enable this test when this feature gets released on go-ipfs + it.skip('should stat withLocal file', function (done) { + ipfs.files.stat('/test/b', {withLocal: true}, (err, stat) => { + expect(err).to.not.exist() + expect(stat).to.eql({ + type: 'file', + blocks: 1, + size: 13, + hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T', + cumulativeSize: 71, + withLocality: true, + local: true, + sizeLocal: 71 + }) + done() + }) + }) + + // TODO enable this test when this feature gets released on go-ipfs + it.skip('should stat withLocal dir', function (done) { + ipfs.files.stat('/test', {withLocal: true}, (err, stat) => { + expect(err).to.not.exist() + expect(stat).to.eql({ + type: 'directory', + blocks: 2, + size: 0, + hash: 'QmVrkkNurBCeJvPRohW5JTvJG4AxGrFg7FnmsZZUS6nJto', + cumulativeSize: 216, + withLocality: true, + local: true, + sizeLocal: 216 + }) + done() + }) + }) + + // TODO: (achingbrain) - Not yet supported in js-ipfs or go-ipfs 
yet') + it.skip('should stat outside of mfs', function (done) { + ipfs.files.stat('/ipfs/' + smallFile.cid, (err, stat) => { + expect(err).to.not.exist() + expect(stat).to.eql({ + type: 'file', + blocks: 0, + size: 12, + hash: 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', + cumulativeSize: 20, + withLocality: false, + local: undefined, + sizeLocal: undefined + }) + done() + }) + }) + }) +} diff --git a/js/src/files/write.js b/js/src/files/write.js new file mode 100644 index 000000000..3cbf19877 --- /dev/null +++ b/js/src/files/write.js @@ -0,0 +1,53 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const { getDescribe, getIt } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.files.write', function () { + this.timeout(40 * 1000) + + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should not write to non existent file, expect error', function (done) { + ipfs.files.write('/test/a', Buffer.from('Hello, world!'), (err) => { + expect(err).to.exist() + done() + }) + }) + + it('should write to non existent file with create flag, expect no error', function (done) { + ipfs.files.write('/test/a', Buffer.from('Hello, world!'), {create: true}, (err) => { + expect(err).to.not.exist() + done() + }) + }) + }) +} diff --git a/js/src/index.js b/js/src/index.js index 07904c431..1bcf026c1 100644 --- a/js/src/index.js +++ b/js/src/index.js 
@@ -1,22 +1,22 @@ 'use strict' -exports.object = require('./object') -exports.files = require('./files') -exports.filesMFS = require('./files-mfs') +exports.block = require('./block') +exports.bootstrap = require('./bootstrap') exports.config = require('./config') -exports.pin = require('./pin') +exports.dag = require('./dag') +exports.dht = require('./dht') +exports.files = require('./files') +exports.key = require('./key') +exports.ls = require('./ls') exports.generic = require('./miscellaneous') exports.miscellaneous = require('./miscellaneous') -exports.swarm = require('./swarm') -exports.block = require('./block') -exports.dht = require('./dht') -exports.dag = require('./dag') +exports.object = require('./object') +exports.pin = require('./pin') exports.ping = require('./ping') exports.pubsub = require('./pubsub') -exports.key = require('./key') -exports.stats = require('./stats') exports.repo = require('./repo') -exports.bootstrap = require('./bootstrap') +exports.stats = require('./stats') +exports.swarm = require('./swarm') exports.types = require('./types') exports.util = require('./util') exports.bitswap = require('./bitswap') diff --git a/js/src/ls/index.js b/js/src/ls/index.js new file mode 100644 index 000000000..6016fd03a --- /dev/null +++ b/js/src/ls/index.js @@ -0,0 +1,10 @@ +'use strict' +const { createSuite } = require('../utils/suite') + +const tests = { + ls: require('./ls'), + lsReadableStream: require('./ls-readable-stream'), + lsPullStream: require('./ls-pull-stream') +} + +module.exports = createSuite(tests) diff --git a/js/src/ls/ls-pull-stream.js b/js/src/ls/ls-pull-stream.js new file mode 100644 index 000000000..4b770e0ef --- /dev/null +++ b/js/src/ls/ls-pull-stream.js @@ -0,0 +1,135 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const loadFixture = 
require('aegir/fixtures') +const pull = require('pull-stream') +const isNode = require('detect-node') +const { getDescribe, getIt } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.lsPullStream', function () { + this.timeout(40 * 1000) + + let ipfs + + const directory = { + cid: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP', + files: { + 'pp.txt': loadFixture('js/test/fixtures/test-folder/pp.txt', 'interface-ipfs-core'), + 'holmes.txt': loadFixture('js/test/fixtures/test-folder/holmes.txt', 'interface-ipfs-core'), + 'jungle.txt': loadFixture('js/test/fixtures/test-folder/jungle.txt', 'interface-ipfs-core'), + 'alice.txt': loadFixture('js/test/fixtures/test-folder/alice.txt', 'interface-ipfs-core'), + 'files/hello.txt': loadFixture('js/test/fixtures/test-folder/files/hello.txt', 'interface-ipfs-core'), + 'files/ipfs.txt': loadFixture('js/test/fixtures/test-folder/files/ipfs.txt', 'interface-ipfs-core') + } + } + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should pull stream ls with a base58 encoded CID', function (done) { + // TODO: https://github.com/ipfs/js-ipfs-api/issues/339 + if (!isNode) { this.skip() } + + const content = (name) => ({ + path: `test-folder/${name}`, + content: directory.files[name] + }) + + const emptyDir = (name) => ({ path: `test-folder/${name}` }) + + const dirs = [ + content('pp.txt'), + content('holmes.txt'), + content('jungle.txt'), + content('alice.txt'), + emptyDir('empty-folder'), + content('files/hello.txt'), + content('files/ipfs.txt'), + 
emptyDir('files/empty') + ] + + ipfs.files.add(dirs, (err, res) => { + expect(err).to.not.exist() + const root = res[res.length - 1] + + expect(root.path).to.equal('test-folder') + expect(root.hash).to.equal(directory.cid) + + const cid = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' + const stream = ipfs.lsPullStream(cid) + + pull( + stream, + pull.collect((err, files) => { + expect(err).to.not.exist() + + expect(files).to.eql([ + { depth: 1, + name: 'alice.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', + size: 11696, + hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi', + type: 'file' }, + { depth: 1, + name: 'empty-folder', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder', + size: 4, + hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', + type: 'dir' }, + { depth: 1, + name: 'files', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files', + size: 183, + hash: 'QmZ25UfTqXGz9RsEJFg7HUAuBcmfx5dQZDXQd2QEZ8Kj74', + type: 'dir' }, + { depth: 1, + name: 'holmes.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt', + size: 582072, + hash: 'QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr', + type: 'file' }, + { depth: 1, + name: 'jungle.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt', + size: 2305, + hash: 'QmT6orWioMiSqXXPGsUi71CKRRUmJ8YkuueV2DPV34E9y9', + type: 'file' }, + { depth: 1, + name: 'pp.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt', + size: 4551, + hash: 'QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn', + type: 'file' } + ]) + done() + }) + ) + }) + }) + }) +} diff --git a/js/src/ls/ls-readable-stream.js b/js/src/ls/ls-readable-stream.js new file mode 100644 index 000000000..9c68fc4e9 --- /dev/null +++ b/js/src/ls/ls-readable-stream.js @@ -0,0 +1,134 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const 
expect = chai.expect +chai.use(dirtyChai) +const loadFixture = require('aegir/fixtures') +const concat = require('concat-stream') +const isNode = require('detect-node') +const { getDescribe, getIt } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.lsReadableStream', function () { + this.timeout(40 * 1000) + + let ipfs + + function fixture (path) { + return loadFixture(path, 'interface-ipfs-core') + } + + const directory = { + cid: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP', + files: { + 'pp.txt': fixture('js/test/fixtures/test-folder/pp.txt'), + 'holmes.txt': fixture('js/test/fixtures/test-folder/holmes.txt'), + 'jungle.txt': fixture('js/test/fixtures/test-folder/jungle.txt'), + 'alice.txt': fixture('js/test/fixtures/test-folder/alice.txt'), + 'files/hello.txt': fixture('js/test/fixtures/test-folder/files/hello.txt'), + 'files/ipfs.txt': fixture('js/test/fixtures/test-folder/files/ipfs.txt') + } + } + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should readable stream ls with a base58 encoded CID', function (done) { + // TODO: https://github.com/ipfs/js-ipfs-api/issues/339 + if (!isNode) { this.skip() } + + const content = (name) => ({ + path: `test-folder/${name}`, + content: directory.files[name] + }) + + const emptyDir = (name) => ({ path: `test-folder/${name}` }) + + const dirs = [ + content('pp.txt'), + content('holmes.txt'), + content('jungle.txt'), + content('alice.txt'), + emptyDir('empty-folder'), + content('files/hello.txt'), + content('files/ipfs.txt'), + 
emptyDir('files/empty') + ] + + ipfs.files.add(dirs, (err, res) => { + expect(err).to.not.exist() + const root = res[res.length - 1] + + expect(root.path).to.equal('test-folder') + expect(root.hash).to.equal(directory.cid) + + const cid = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' + const stream = ipfs.lsReadableStream(cid) + + stream.pipe(concat((files) => { + expect(files).to.eql([ + { depth: 1, + name: 'alice.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', + size: 11696, + hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi', + type: 'file' }, + { depth: 1, + name: 'empty-folder', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder', + size: 4, + hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', + type: 'dir' }, + { depth: 1, + name: 'files', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files', + size: 183, + hash: 'QmZ25UfTqXGz9RsEJFg7HUAuBcmfx5dQZDXQd2QEZ8Kj74', + type: 'dir' }, + { depth: 1, + name: 'holmes.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt', + size: 582072, + hash: 'QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr', + type: 'file' }, + { depth: 1, + name: 'jungle.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt', + size: 2305, + hash: 'QmT6orWioMiSqXXPGsUi71CKRRUmJ8YkuueV2DPV34E9y9', + type: 'file' }, + { depth: 1, + name: 'pp.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt', + size: 4551, + hash: 'QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn', + type: 'file' } + ]) + done() + })) + }) + }) + }) +} diff --git a/js/src/ls/ls.js b/js/src/ls/ls.js new file mode 100644 index 000000000..f866eac0c --- /dev/null +++ b/js/src/ls/ls.js @@ -0,0 +1,145 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const loadFixture = require('aegir/fixtures') +const 
isNode = require('detect-node') +const { getDescribe, getIt } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.ls', function () { + this.timeout(40 * 1000) + + let ipfs + + const directory = { + cid: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP', + files: { + 'pp.txt': loadFixture('js/test/fixtures/test-folder/pp.txt', 'interface-ipfs-core'), + 'holmes.txt': loadFixture('js/test/fixtures/test-folder/holmes.txt', 'interface-ipfs-core'), + 'jungle.txt': loadFixture('js/test/fixtures/test-folder/jungle.txt', 'interface-ipfs-core'), + 'alice.txt': loadFixture('js/test/fixtures/test-folder/alice.txt', 'interface-ipfs-core'), + 'files/hello.txt': loadFixture('js/test/fixtures/test-folder/files/hello.txt', 'interface-ipfs-core'), + 'files/ipfs.txt': loadFixture('js/test/fixtures/test-folder/files/ipfs.txt', 'interface-ipfs-core') + } + } + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should ls with a base58 encoded CID', function (done) { + // TODO: https://github.com/ipfs/js-ipfs-api/issues/339 + if (!isNode) { this.skip() } + + const content = (name) => ({ + path: `test-folder/${name}`, + content: directory.files[name] + }) + + const emptyDir = (name) => ({ path: `test-folder/${name}` }) + + const dirs = [ + content('pp.txt'), + content('holmes.txt'), + content('jungle.txt'), + content('alice.txt'), + emptyDir('empty-folder'), + content('files/hello.txt'), + content('files/ipfs.txt'), + emptyDir('files/empty') + ] + + ipfs.files.add(dirs, (err, res) => { + expect(err).to.not.exist() + const root 
= res[res.length - 1] + + expect(root.path).to.equal('test-folder') + expect(root.hash).to.equal(directory.cid) + + const cid = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' + ipfs.ls(cid, (err, files) => { + expect(err).to.not.exist() + + expect(files).to.eql([ + { depth: 1, + name: 'alice.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', + size: 11696, + hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi', + type: 'file' }, + { depth: 1, + name: 'empty-folder', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder', + size: 4, + hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', + type: 'dir' }, + { depth: 1, + name: 'files', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files', + size: 183, + hash: 'QmZ25UfTqXGz9RsEJFg7HUAuBcmfx5dQZDXQd2QEZ8Kj74', + type: 'dir' }, + { depth: 1, + name: 'holmes.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt', + size: 582072, + hash: 'QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr', + type: 'file' }, + { depth: 1, + name: 'jungle.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt', + size: 2305, + hash: 'QmT6orWioMiSqXXPGsUi71CKRRUmJ8YkuueV2DPV34E9y9', + type: 'file' }, + { depth: 1, + name: 'pp.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt', + size: 4551, + hash: 'QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn', + type: 'file' } + ]) + done() + }) + }) + }) + + it('should correctly handle a non existing hash', (done) => { + ipfs.ls('surelynotavalidhashheh?', (err, res) => { + expect(err).to.exist() + expect(res).to.not.exist() + done() + }) + }) + + it('should correctly handle a non exiting path', (done) => { + ipfs.ls('QmRNjDeKStKGTQXnJ2NFqeQ9oW/folder_that_isnt_there', (err, res) => { + expect(err).to.exist() + expect(res).to.not.exist() + done() + }) + }) + }) +} diff --git a/js/src/utils/mocha.js b/js/src/utils/mocha.js index ef25ee6c9..e4004c7a0 100644 --- a/js/src/utils/mocha.js +++ 
b/js/src/utils/mocha.js @@ -1,6 +1,7 @@ /* eslint-env mocha */ // Get a "describe" function that is optionally 'skipped' or 'onlyed' +// If skip/only are boolean true, then we want to skip/only the whole suite function getDescribe (config) { if (config && config.skip === true) return describe.skip if (config && config.only === true) return describe.only @@ -10,6 +11,7 @@ function getDescribe (config) { module.exports.getDescribe = getDescribe // Get an "it" function that is optionally 'skipped' or 'onlyed' +// If skip/only are an array, then we _might_ want to skip/only the specific test function getIt (config) { const _it = (name, impl) => { if (config && Array.isArray(config.skip)) { From a3008e66b683e73d12116f3111387540a5558a85 Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Wed, 6 Jun 2018 15:33:23 +0100 Subject: [PATCH 11/41] feat(files): isolate MFS tests License: MIT Signed-off-by: Alan Shaw --- js/src/files/cp.js | 25 ++++++++++++--- js/src/files/flush.js | 15 +++++++-- js/src/files/ls.js | 70 ++++++++++++++++++++++++++--------------- js/src/files/mkdir.js | 11 +++++-- js/src/files/mv.js | 43 ++++++++++++++++++-------- js/src/files/read.js | 21 ++++++++++--- js/src/files/rm.js | 36 ++++++++++++++-------- js/src/files/stat.js | 72 ++++++++++++++++++++++++++++--------------- js/src/files/write.js | 9 ++++-- 9 files changed, 213 insertions(+), 89 deletions(-) diff --git a/js/src/files/cp.js b/js/src/files/cp.js index 5f8c03e2c..2e8d4e08a 100644 --- a/js/src/files/cp.js +++ b/js/src/files/cp.js @@ -4,6 +4,8 @@ 'use strict' const chai = require('chai') +const series = require('async/series') +const hat = require('hat') const dirtyChai = require('dirty-chai') const expect = chai.expect chai.use(dirtyChai) @@ -37,28 +39,43 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) it('should copy file, expect error', (done) => { - ipfs.files.cp(['/test/c', '/test/b'], (err) => { + const testDir = `/test-${hat()}` + + 
ipfs.files.cp([`${testDir}/c`, `${testDir}/b`], (err) => { expect(err).to.exist() done() }) }) it('should copy file, expect no error', (done) => { - ipfs.files.cp(['/test/a', '/test/b'], (err) => { + const testDir = `/test-${hat()}` + + series([ + (cb) => ipfs.files.mkdir(testDir, { p: true }, cb), + (cb) => ipfs.files.write(`${testDir}/a`, Buffer.from('TEST'), { create: true }, cb), + (cb) => ipfs.files.cp([`${testDir}/a`, `${testDir}/b`], cb) + ], (err) => { expect(err).to.not.exist() done() }) }) it('should copy dir, expect error', (done) => { - ipfs.files.cp(['/test/lv1/lv3', '/test/lv1/lv4'], (err) => { + const testDir = `/test-${hat()}` + + ipfs.files.cp([`${testDir}/lv1/lv3`, `${testDir}/lv1/lv4`], (err) => { expect(err).to.exist() done() }) }) it('should copy dir, expect no error', (done) => { - ipfs.files.cp(['/test/lv1/lv2', '/test/lv1/lv3'], (err) => { + const testDir = `/test-${hat()}` + + series([ + (cb) => ipfs.files.mkdir(`${testDir}/lv1/lv2`, { p: true }, cb), + (cb) => ipfs.files.cp([`${testDir}/lv1/lv2`, `${testDir}/lv1/lv3`], cb) + ], (err) => { expect(err).to.not.exist() done() }) diff --git a/js/src/files/flush.js b/js/src/files/flush.js index 3bfdbb34e..eb9f4f20b 100644 --- a/js/src/files/flush.js +++ b/js/src/files/flush.js @@ -4,6 +4,8 @@ 'use strict' const chai = require('chai') +const series = require('async/series') +const hat = require('hat') const dirtyChai = require('dirty-chai') const expect = chai.expect chai.use(dirtyChai) @@ -36,8 +38,10 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should not flush not found, expect error', (done) => { - ipfs.files.flush('/test/404', (err) => { + it('should not flush not found file/dir, expect error', (done) => { + const testDir = `/test-${hat()}` + + ipfs.files.flush(`${testDir}/404`, (err) => { expect(err).to.exist() done() }) @@ -51,7 +55,12 @@ module.exports = (createCommon, options) => { }) it('should flush specific dir', (done) => { - 
ipfs.files.flush('/test', (err) => { + const testDir = `/test-${hat()}` + + series([ + (cb) => ipfs.files.mkdir(testDir, { p: true }, cb), + (cb) => ipfs.files.flush(testDir, cb) + ], (err) => { expect(err).to.not.exist() done() }) diff --git a/js/src/files/ls.js b/js/src/files/ls.js index 312e68162..a1ee6426e 100644 --- a/js/src/files/ls.js +++ b/js/src/files/ls.js @@ -4,6 +4,8 @@ 'use strict' const chai = require('chai') +const series = require('async/series') +const hat = require('hat') const dirtyChai = require('dirty-chai') const expect = chai.expect chai.use(dirtyChai) @@ -36,8 +38,10 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should not ls not found, expect error', (done) => { - ipfs.files.ls('/test/404', (err, info) => { + it('should not ls not found file/dir, expect error', (done) => { + const testDir = `/test-${hat()}` + + ipfs.files.ls(`${testDir}/404`, (err, info) => { expect(err).to.exist() expect(info).to.not.exist() done() @@ -45,34 +49,52 @@ module.exports = (createCommon, options) => { }) it('should ls directory', (done) => { - ipfs.files.ls('/test', (err, info) => { + const testDir = `/test-${hat()}` + + series([ + (cb) => ipfs.files.mkdir(`${testDir}/lv1`, { p: true }, cb), + (cb) => ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }, cb) + ], (err) => { expect(err).to.not.exist() - expect(info).to.eql([ - { name: 'b', type: 0, size: 0, hash: '' }, - { name: 'lv1', type: 0, size: 0, hash: '' } - ]) - done() + + ipfs.files.ls(testDir, (err, info) => { + expect(err).to.not.exist() + expect(info).to.eql([ + { name: 'b', type: 0, size: 0, hash: '' }, + { name: 'lv1', type: 0, size: 0, hash: '' } + ]) + done() + }) }) }) it('should ls -l directory', (done) => { - ipfs.files.ls('/test', { l: true }, (err, info) => { + const testDir = `/test-${hat()}` + + series([ + (cb) => ipfs.files.mkdir(`${testDir}/lv1`, { p: true }, cb), + (cb) => ipfs.files.write(`${testDir}/b`, 
Buffer.from('Hello, world!'), { create: true }, cb) + ], (err) => { expect(err).to.not.exist() - expect(info).to.eql([ - { - name: 'b', - type: 0, - size: 13, - hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T' - }, - { - name: 'lv1', - type: 1, - size: 0, - hash: 'QmaSPtNHYKPjNjQnYX9pdu5ocpKUQEL3itSz8LuZcoW6J5' - } - ]) - done() + + ipfs.files.ls(testDir, { l: true }, (err, info) => { + expect(err).to.not.exist() + expect(info).to.eql([ + { + name: 'lv1', + type: 1, + size: 0, + hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn' + }, + { + name: 'b', + type: 0, + size: 13, + hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T' + } + ]) + done() + }) }) }) }) diff --git a/js/src/files/mkdir.js b/js/src/files/mkdir.js index 4a9609bd9..b7fc25523 100644 --- a/js/src/files/mkdir.js +++ b/js/src/files/mkdir.js @@ -4,6 +4,7 @@ 'use strict' const chai = require('chai') +const hat = require('hat') const dirtyChai = require('dirty-chai') const expect = chai.expect chai.use(dirtyChai) @@ -37,20 +38,24 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) it('should make directory on root', (done) => { - ipfs.files.mkdir('/test', (err) => { + const testDir = `/test-${hat()}` + + ipfs.files.mkdir(testDir, (err) => { expect(err).to.not.exist() done() }) }) it('should make directory and its parents', (done) => { - ipfs.files.mkdir('/test/lv1/lv2', { p: true }, (err) => { + const testDir = `/test-${hat()}` + + ipfs.files.mkdir(`${testDir}/lv1/lv2`, { p: true }, (err) => { expect(err).to.not.exist() done() }) }) - it('should make already existent directory', (done) => { + it('should not make already existent directory', (done) => { ipfs.files.mkdir('/', (err) => { expect(err).to.exist() done() diff --git a/js/src/files/mv.js b/js/src/files/mv.js index 4bcddda88..3e3290341 100644 --- a/js/src/files/mv.js +++ b/js/src/files/mv.js @@ -4,6 +4,8 @@ 'use strict' const chai = require('chai') +const series = require('async/series') +const hat = 
require('hat') const dirtyChai = require('dirty-chai') const expect = chai.expect chai.use(dirtyChai) @@ -34,33 +36,50 @@ module.exports = (createCommon, options) => { }) }) + before((done) => { + series([ + (cb) => ipfs.files.mkdir('/test/lv1/lv2', { p: true }, cb), + (cb) => ipfs.files.write('/test/a', Buffer.from('Hello, world!'), { create: true }, cb) + ], done) + }) + after((done) => common.teardown(done)) - it('should move file, expect error', (done) => { - ipfs.files.mv(['/test/404', '/test/a'], (err) => { + it('should not move not found file/dir, expect error', (done) => { + const testDir = `/test-${hat()}` + + ipfs.files.mv([`${testDir}/404`, `${testDir}/a`], (err) => { expect(err).to.exist() done() }) }) it('should move file, expect no error', (done) => { - ipfs.files.mv(['/test/a', '/test/c'], (err) => { + const testDir = `/test-${hat()}` + + series([ + (cb) => ipfs.files.mkdir(`${testDir}/lv1/lv2`, { p: true }, cb), + (cb) => ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), { create: true }, cb) + ], (err) => { expect(err).to.not.exist() - done() - }) - }) - it('should move dir, expect error', (done) => { - ipfs.files.mv(['/test/lv1/404', '/test/lv1'], (err) => { - expect(err).to.exist() - done() + ipfs.files.mv([`${testDir}/a`, `${testDir}/c`], (err) => { + expect(err).to.not.exist() + done() + }) }) }) it('should move dir, expect no error', (done) => { - ipfs.files.mv(['/test/lv1/lv2', '/test/lv1/lv4'], (err) => { + const testDir = `/test-${hat()}` + + ipfs.files.mkdir(`${testDir}/lv1/lv2`, { p: true }, (err) => { expect(err).to.not.exist() - done() + + ipfs.files.mv(['/test/lv1/lv2', '/test/lv1/lv4'], (err) => { + expect(err).to.not.exist() + done() + }) }) }) }) diff --git a/js/src/files/read.js b/js/src/files/read.js index c5a3f7210..2b09f85ab 100644 --- a/js/src/files/read.js +++ b/js/src/files/read.js @@ -4,6 +4,8 @@ 'use strict' const chai = require('chai') +const series = require('async/series') +const hat = require('hat') const 
dirtyChai = require('dirty-chai') const expect = chai.expect chai.use(dirtyChai) @@ -37,7 +39,9 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) it('should not read not found, expect error', (done) => { - ipfs.files.read('/test/404', (err, buf) => { + const testDir = `/test-${hat()}` + + ipfs.files.read(`${testDir}/404`, (err, buf) => { expect(err).to.exist() expect(buf).to.not.exist() done() @@ -45,10 +49,19 @@ module.exports = (createCommon, options) => { }) it('should read file', (done) => { - ipfs.files.read('/test/b', (err, buf) => { + const testDir = `/test-${hat()}` + + series([ + (cb) => ipfs.files.mkdir(testDir, cb), + (cb) => ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), { create: true }, cb) + ], (err) => { expect(err).to.not.exist() - expect(buf).to.eql(Buffer.from('Hello, world!')) - done() + + ipfs.files.read(`${testDir}/a`, (err, buf) => { + expect(err).to.not.exist() + expect(buf).to.eql(Buffer.from('Hello, world!')) + done() + }) }) }) }) diff --git a/js/src/files/rm.js b/js/src/files/rm.js index 8a2145c7c..0a5346f3c 100644 --- a/js/src/files/rm.js +++ b/js/src/files/rm.js @@ -4,6 +4,8 @@ 'use strict' const chai = require('chai') +const series = require('async/series') +const hat = require('hat') const dirtyChai = require('dirty-chai') const expect = chai.expect chai.use(dirtyChai) @@ -36,31 +38,41 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should remove file, expect error', (done) => { - ipfs.files.rm('/test/a', (err) => { + it('should not remove not found file/dir, expect error', (done) => { + const testDir = `/test-${hat()}` + + ipfs.files.rm(`${testDir}/a`, (err) => { expect(err).to.exist() done() }) }) it('should remove file, expect no error', (done) => { - ipfs.files.rm('/test/c', (err) => { + const testDir = `/test-${hat()}` + + series([ + (cb) => ipfs.files.mkdir(testDir, { p: true }, cb), + (cb) => ipfs.files.write(`${testDir}/c`, 
Buffer.from('Hello, world!'), { create: true }, cb) + ], (err) => { expect(err).to.not.exist() - done() - }) - }) - it('should remove dir, expect error', (done) => { - ipfs.files.rm('/test/lv1/lv4', (err) => { - expect(err).to.exist() - done() + ipfs.files.rm(`${testDir}/c`, (err) => { + expect(err).to.not.exist() + done() + }) }) }) it('should remove dir, expect no error', (done) => { - ipfs.files.rm('/test/lv1/lv4', {recursive: true}, (err) => { + const testDir = `/test-${hat()}` + + ipfs.files.mkdir(`${testDir}/lv1/lv2`, { p: true }, (err) => { expect(err).to.not.exist() - done() + + ipfs.files.rm(`${testDir}/lv1/lv2`, { recursive: true }, (err) => { + expect(err).to.not.exist() + done() + }) }) }) }) diff --git a/js/src/files/stat.js b/js/src/files/stat.js index b04993a83..01d0708d0 100644 --- a/js/src/files/stat.js +++ b/js/src/files/stat.js @@ -4,8 +4,10 @@ 'use strict' const chai = require('chai') -const dirtyChai = require('dirty-chai') +const series = require('async/series') +const hat = require('hat') const loadFixture = require('aegir/fixtures') +const dirtyChai = require('dirty-chai') const expect = chai.expect chai.use(dirtyChai) const { getDescribe, getIt } = require('../utils/mocha') @@ -44,44 +46,64 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should not stat not found, expect error', function (done) { - ipfs.files.stat('/test/404', (err) => { + it('should not stat not found file/dir, expect error', function (done) { + const testDir = `/test-${hat()}` + + ipfs.files.stat(`${testDir}/404`, (err) => { expect(err).to.exist() done() }) }) it('should stat file', function (done) { - ipfs.files.stat('/test/b', (err, stat) => { + const testDir = `/test-${hat()}` + + series([ + (cb) => ipfs.files.mkdir(testDir, { p: true }, cb), + (cb) => ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }, cb) + ], (err) => { expect(err).to.not.exist() - expect(stat).to.eql({ - type: 'file', - 
blocks: 1, - size: 13, - hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T', - cumulativeSize: 71, - withLocality: false, - local: undefined, - sizeLocal: undefined + + ipfs.files.stat(`${testDir}/b`, (err, stat) => { + expect(err).to.not.exist() + expect(stat).to.eql({ + type: 'file', + blocks: 1, + size: 13, + hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T', + cumulativeSize: 71, + withLocality: false, + local: undefined, + sizeLocal: undefined + }) + done() }) - done() }) }) it('should stat dir', function (done) { - ipfs.files.stat('/test', (err, stat) => { + const testDir = `/test-${hat()}` + + series([ + (cb) => ipfs.files.mkdir(testDir, { p: true }, cb), + (cb) => ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), { create: true }, cb) + ], (err) => { expect(err).to.not.exist() - expect(stat).to.eql({ - type: 'directory', - blocks: 2, - size: 0, - hash: 'QmVrkkNurBCeJvPRohW5JTvJG4AxGrFg7FnmsZZUS6nJto', - cumulativeSize: 216, - withLocality: false, - local: undefined, - sizeLocal: undefined + + ipfs.files.stat(testDir, (err, stat) => { + expect(err).to.not.exist() + expect(stat).to.eql({ + type: 'directory', + blocks: 1, + size: 0, + hash: 'QmQGn7EvzJZRbhcwHrp4UeMeS56WsLmrey9JhfkymjzXQu', + cumulativeSize: 118, + withLocality: false, + local: undefined, + sizeLocal: undefined + }) + done() }) - done() }) }) diff --git a/js/src/files/write.js b/js/src/files/write.js index 3cbf19877..5739cf014 100644 --- a/js/src/files/write.js +++ b/js/src/files/write.js @@ -4,6 +4,7 @@ 'use strict' const chai = require('chai') +const hat = require('hat') const dirtyChai = require('dirty-chai') const expect = chai.expect chai.use(dirtyChai) @@ -37,14 +38,18 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) it('should not write to non existent file, expect error', function (done) { - ipfs.files.write('/test/a', Buffer.from('Hello, world!'), (err) => { + const testDir = `/test-${hat()}` + + 
ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), (err) => { expect(err).to.exist() done() }) }) it('should write to non existent file with create flag, expect no error', function (done) { - ipfs.files.write('/test/a', Buffer.from('Hello, world!'), {create: true}, (err) => { + const testDir = `/test-${hat()}` + + ipfs.files.write(testDir, Buffer.from('Hello, world!'), {create: true}, (err) => { expect(err).to.not.exist() done() }) From 0996d7c5b7a7306b6a17012da0673cbe62c3c71e Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Wed, 6 Jun 2018 15:59:53 +0100 Subject: [PATCH 12/41] fix: removes platform specific skips License: MIT Signed-off-by: Alan Shaw --- js/src/files-mfs.js | 508 ---------------------------- js/src/files/add-pull-stream.js | 6 +- js/src/files/add-readable-stream.js | 6 +- js/src/files/add.js | 9 +- js/src/files/get.js | 4 - js/src/ls/ls-pull-stream.js | 4 - js/src/ls/ls-readable-stream.js | 4 - js/src/ls/ls.js | 4 - package.json | 1 - 9 files changed, 3 insertions(+), 543 deletions(-) delete mode 100644 js/src/files-mfs.js diff --git a/js/src/files-mfs.js b/js/src/files-mfs.js deleted file mode 100644 index 2d8a07225..000000000 --- a/js/src/files-mfs.js +++ /dev/null @@ -1,508 +0,0 @@ -/* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - -'use strict' - -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const loadFixture = require('aegir/fixtures') -const expect = chai.expect -chai.use(dirtyChai) - -module.exports = (common) => { - describe('.files (MFS Specific)', function () { - this.timeout(40 * 1000) - - let ipfs - let withGo - - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - node.id((err, id) => { - expect(err).to.not.exist() - withGo = 
id.agentVersion.startsWith('go-ipfs') - done() - }) - }) - }) - }) - - after((done) => common.teardown(done)) - - describe('.mkdir', function () { - it('make directory on root', function (done) { - if (!withGo) { - console.log('Not supported in js-ipfs yet') - this.skip() - } - - ipfs.files.mkdir('/test', (err) => { - expect(err).to.not.exist() - done() - }) - }) - - it('make directory and its parents', function (done) { - if (!withGo) { - console.log('Not supported in js-ipfs yet') - this.skip() - } - - ipfs.files.mkdir('/test/lv1/lv2', { p: true }, (err) => { - expect(err).to.not.exist() - done() - }) - }) - - it('make already existent directory', function (done) { - if (!withGo) { - console.log('Not supported in js-ipfs yet') - this.skip() - } - - ipfs.files.mkdir('/', (err) => { - expect(err).to.exist() - done() - }) - }) - }) - - describe('.write', function () { - it('expect error', function (done) { - if (!withGo) { - console.log('Not supported in js-ipfs yet') - this.skip() - } - - ipfs.files.write('/test/a', Buffer.from('Hello, world!'), (err) => { - expect(err).to.exist() - done() - }) - }) - - it('expect no error', function (done) { - if (!withGo) { - console.log('Not supported in js-ipfs yet') - this.skip() - } - - ipfs.files.write('/test/a', Buffer.from('Hello, world!'), {create: true}, (err) => { - expect(err).to.not.exist() - done() - }) - }) - }) - - describe('.cp', function () { - it('copy file, expect error', function (done) { - if (!withGo) { - console.log('Not supported in js-ipfs yet') - this.skip() - } - - ipfs.files.cp(['/test/c', '/test/b'], (err) => { - expect(err).to.exist() - done() - }) - }) - - it('copy file, expect no error', function (done) { - if (!withGo) { - console.log('Not supported in js-ipfs yet') - this.skip() - } - - ipfs.files.cp(['/test/a', '/test/b'], (err) => { - expect(err).to.not.exist() - done() - }) - }) - - it('copy dir, expect error', function (done) { - if (!withGo) { - console.log('Not supported in js-ipfs yet') - 
this.skip() - } - - ipfs.files.cp(['/test/lv1/lv3', '/test/lv1/lv4'], (err) => { - expect(err).to.exist() - done() - }) - }) - - it('copy dir, expect no error', function (done) { - if (!withGo) { - console.log('Not supported in js-ipfs yet') - this.skip() - } - - ipfs.files.cp(['/test/lv1/lv2', '/test/lv1/lv3'], (err) => { - expect(err).to.not.exist() - done() - }) - }) - }) - - describe('.mv', function () { - it('move file, expect error', function (done) { - if (!withGo) { - console.log('Not supported in js-ipfs yet') - this.skip() - } - - ipfs.files.mv(['/test/404', '/test/a'], (err) => { - expect(err).to.exist() - done() - }) - }) - - it('move file, expect no error', function (done) { - if (!withGo) { - console.log('Not supported in js-ipfs yet') - this.skip() - } - - ipfs.files.mv(['/test/a', '/test/c'], (err) => { - expect(err).to.not.exist() - done() - }) - }) - - it('move dir, expect error', function (done) { - if (!withGo) { - console.log('Not supported in js-ipfs yet') - this.skip() - } - - ipfs.files.mv(['/test/lv1/404', '/test/lv1'], (err) => { - expect(err).to.exist() - done() - }) - }) - - it('move dir, expect no error', function (done) { - if (!withGo) { - console.log('Not supported in js-ipfs yet') - this.skip() - } - - ipfs.files.mv(['/test/lv1/lv2', '/test/lv1/lv4'], (err) => { - expect(err).to.not.exist() - done() - }) - }) - }) - - describe('.rm', function () { - it('remove file, expect error', function (done) { - if (!withGo) { - console.log('Not supported in js-ipfs yet') - this.skip() - } - - ipfs.files.rm('/test/a', (err) => { - expect(err).to.exist() - done() - }) - }) - - it('remove file, expect no error', function (done) { - if (!withGo) { - console.log('Not supported in js-ipfs yet') - this.skip() - } - - ipfs.files.rm('/test/c', (err) => { - expect(err).to.not.exist() - done() - }) - }) - - it('remove dir, expect error', function (done) { - if (!withGo) { - console.log('Not supported in js-ipfs yet') - this.skip() - } - - 
ipfs.files.rm('/test/lv1/lv4', (err) => { - expect(err).to.exist() - done() - }) - }) - - it('remove dir, expect no error', function (done) { - if (!withGo) { - console.log('Not supported in js-ipfs yet') - this.skip() - } - - ipfs.files.rm('/test/lv1/lv4', {recursive: true}, (err) => { - expect(err).to.not.exist() - done() - }) - }) - }) - - describe('.stat', function () { - it('stat not found, expect error', function (done) { - if (!withGo) { - console.log('Not supported in js-ipfs yet') - this.skip() - } - - ipfs.files.stat('/test/404', (err) => { - expect(err).to.exist() - done() - }) - }) - - it('stat file', function (done) { - if (!withGo) { - console.log('Not supported in js-ipfs yet') - this.skip() - } - - ipfs.files.stat('/test/b', (err, stat) => { - expect(err).to.not.exist() - expect(stat).to.eql({ - type: 'file', - blocks: 1, - size: 13, - hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T', - cumulativeSize: 71, - withLocality: false, - local: undefined, - sizeLocal: undefined - }) - done() - }) - }) - - it('stat dir', function (done) { - if (!withGo) { - console.log('Not supported in js-ipfs yet') - this.skip() - } - - ipfs.files.stat('/test', (err, stat) => { - expect(err).to.not.exist() - expect(stat).to.eql({ - type: 'directory', - blocks: 2, - size: 0, - hash: 'QmVrkkNurBCeJvPRohW5JTvJG4AxGrFg7FnmsZZUS6nJto', - cumulativeSize: 216, - withLocality: false, - local: undefined, - sizeLocal: undefined - }) - done() - }) - }) - - // TODO enable this test when this feature gets released on go-ipfs - it.skip('stat withLocal file', function (done) { - if (!withGo) { - console.log('Not supported in js-ipfs yet') - this.skip() - } - - ipfs.files.stat('/test/b', {'withLocal': true}, (err, stat) => { - expect(err).to.not.exist() - expect(stat).to.eql({ - type: 'file', - blocks: 1, - size: 13, - hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T', - cumulativeSize: 71, - withLocality: true, - local: true, - sizeLocal: 71 - }) - done() - }) - }) - - // 
TODO enable this test when this feature gets released on go-ipfs - it.skip('stat withLocal dir', function (done) { - if (!withGo) { - console.log('Not supported in js-ipfs yet') - this.skip() - } - - ipfs.files.stat('/test', {'withLocal': true}, (err, stat) => { - expect(err).to.not.exist() - expect(stat).to.eql({ - type: 'directory', - blocks: 2, - size: 0, - hash: 'QmVrkkNurBCeJvPRohW5JTvJG4AxGrFg7FnmsZZUS6nJto', - cumulativeSize: 216, - withLocality: true, - local: true, - sizeLocal: 216 - }) - done() - }) - }) - }) - - describe('.read', function () { - it('read not found, expect error', function (done) { - if (!withGo) { - console.log('Not supported in js-ipfs yet') - this.skip() - } - - ipfs.files.read('/test/404', (err, buf) => { - expect(err).to.exist() - expect(buf).to.not.exist() - done() - }) - }) - - it('read file', function (done) { - if (!withGo) { - console.log('Not supported in js-ipfs yet') - this.skip() - } - - ipfs.files.read('/test/b', (err, buf) => { - expect(err).to.not.exist() - expect(buf).to.eql(Buffer.from('Hello, world!')) - done() - }) - }) - }) - - describe('.ls', function () { - it('ls not found, expect error', function (done) { - if (!withGo) { - console.log('Not supported in js-ipfs yet') - this.skip() - } - - ipfs.files.ls('/test/404', (err, info) => { - expect(err).to.exist() - expect(info).to.not.exist() - done() - }) - }) - - it('ls directory', function (done) { - if (!withGo) { - console.log('Not supported in js-ipfs yet') - this.skip() - } - - ipfs.files.ls('/test', (err, info) => { - expect(err).to.not.exist() - expect(info).to.eql([ - { name: 'b', type: 0, size: 0, hash: '' }, - { name: 'lv1', type: 0, size: 0, hash: '' } - ]) - done() - }) - }) - - it('ls -l directory', function (done) { - if (!withGo) { - console.log('Not supported in js-ipfs yet') - this.skip() - } - - ipfs.files.ls('/test', { l: true }, (err, info) => { - expect(err).to.not.exist() - expect(info).to.eql([ - { - name: 'b', - type: 0, - size: 13, - hash: 
'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T' - }, - { - name: 'lv1', - type: 1, - size: 0, - hash: 'QmaSPtNHYKPjNjQnYX9pdu5ocpKUQEL3itSz8LuZcoW6J5' - } - ]) - done() - }) - }) - }) - - describe('.flush', function () { - it('flush not found, expect error', function (done) { - if (!withGo) { - console.log('Not supported in js-ipfs yet') - this.skip() - } - - ipfs.files.flush('/test/404', (err) => { - expect(err).to.exist() - done() - }) - }) - - it('flush root', function (done) { - if (!withGo) { - console.log('Not supported in js-ipfs yet') - this.skip() - } - - ipfs.files.flush((err) => { - expect(err).to.not.exist() - done() - }) - }) - - it('flush specific dir', function (done) { - if (!withGo) { - console.log('Not supported in js-ipfs yet') - this.skip() - } - - ipfs.files.flush('/test', (err) => { - expect(err).to.not.exist() - done() - }) - }) - }) - - // TODO: (achingbrain) - Not yet supported in js-ipfs or go-ipfs yet') - describe.skip('.stat', () => { - const smallFile = { - cid: 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', - data: loadFixture('js/test/fixtures/testfile.txt', 'interface-ipfs-core') - } - - before((done) => ipfs.files.add(smallFile.data, done)) - - it.skip('stat outside of mfs', function (done) { - ipfs.files.stat('/ipfs/' + smallFile.cid, (err, stat) => { - expect(err).to.not.exist() - expect(stat).to.eql({ - type: 'file', - blocks: 0, - size: 12, - hash: 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', - cumulativeSize: 20, - withLocality: false, - local: undefined, - sizeLocal: undefined - }) - done() - }) - }) - }) - }) -} diff --git a/js/src/files/add-pull-stream.js b/js/src/files/add-pull-stream.js index 203030cce..d2395bc96 100644 --- a/js/src/files/add-pull-stream.js +++ b/js/src/files/add-pull-stream.js @@ -9,7 +9,6 @@ const expect = chai.expect chai.use(dirtyChai) const loadFixture = require('aegir/fixtures') const pull = require('pull-stream') -const isNode = require('detect-node') const { getDescribe, getIt } = 
require('../utils/mocha') module.exports = (createCommon, options) => { @@ -51,10 +50,7 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should add stream of valid files and dirs', function (done) { - // TODO: https://github.com/ipfs/js-ipfs-api/issues/339 - if (!isNode) { this.skip() } - + it('should add pull stream of valid files and dirs', function (done) { const content = (name) => ({ path: `test-folder/${name}`, content: directory.files[name] diff --git a/js/src/files/add-readable-stream.js b/js/src/files/add-readable-stream.js index 7e96b8908..a36fbf860 100644 --- a/js/src/files/add-readable-stream.js +++ b/js/src/files/add-readable-stream.js @@ -8,7 +8,6 @@ const dirtyChai = require('dirty-chai') const expect = chai.expect chai.use(dirtyChai) const loadFixture = require('aegir/fixtures') -const isNode = require('detect-node') const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { @@ -50,10 +49,7 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should add stream of valid files and dirs', function (done) { - // TODO: https://github.com/ipfs/js-ipfs-api/issues/339 - if (!isNode) { this.skip() } - + it('should add readable stream of valid files and dirs', function (done) { const content = (name) => ({ path: `test-folder/${name}`, content: directory.files[name] diff --git a/js/src/files/add.js b/js/src/files/add.js index ee15179ad..7be377104 100644 --- a/js/src/files/add.js +++ b/js/src/files/add.js @@ -11,7 +11,6 @@ const loadFixture = require('aegir/fixtures') const Readable = require('readable-stream').Readable const pull = require('pull-stream') const path = require('path') -const isNode = require('detect-node') const expectTimeout = require('../utils/expect-timeout') const { getDescribe, getIt } = require('../utils/mocha') @@ -221,9 +220,6 @@ module.exports = (createCommon, options) => { }) it('should add a nested 
directory as array of tupples', function (done) { - // TODO: https://github.com/ipfs/js-ipfs-api/issues/339 - if (!isNode) { this.skip() } - const content = (name) => ({ path: `test-folder/${name}`, content: directory.files[name] @@ -252,10 +248,7 @@ module.exports = (createCommon, options) => { }) }) - it('should add a nested directory as array of tuppled with progress', function (done) { - // TODO: https://github.com/ipfs/js-ipfs-api/issues/339 - if (!isNode) { this.skip() } - + it('should add a nested directory as array of tupples with progress', function (done) { const content = (name) => ({ path: `test-folder/${name}`, content: directory.files[name] diff --git a/js/src/files/get.js b/js/src/files/get.js index 2f490a683..497e83515 100644 --- a/js/src/files/get.js +++ b/js/src/files/get.js @@ -11,7 +11,6 @@ const loadFixture = require('aegir/fixtures') const bs58 = require('bs58') const parallel = require('async/parallel') const series = require('async/series') -const isNode = require('detect-node') const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { @@ -119,9 +118,6 @@ module.exports = (createCommon, options) => { }) it('should get a directory', function (done) { - // TODO: https://github.com/ipfs/js-ipfs-api/issues/339 - if (!isNode) { this.skip() } - series([ (cb) => { const content = (name) => ({ diff --git a/js/src/ls/ls-pull-stream.js b/js/src/ls/ls-pull-stream.js index 4b770e0ef..7bbf2e6b7 100644 --- a/js/src/ls/ls-pull-stream.js +++ b/js/src/ls/ls-pull-stream.js @@ -9,7 +9,6 @@ const expect = chai.expect chai.use(dirtyChai) const loadFixture = require('aegir/fixtures') const pull = require('pull-stream') -const isNode = require('detect-node') const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { @@ -52,9 +51,6 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) it('should pull stream ls with a base58 encoded CID', 
function (done) { - // TODO: https://github.com/ipfs/js-ipfs-api/issues/339 - if (!isNode) { this.skip() } - const content = (name) => ({ path: `test-folder/${name}`, content: directory.files[name] diff --git a/js/src/ls/ls-readable-stream.js b/js/src/ls/ls-readable-stream.js index 9c68fc4e9..faab61fb8 100644 --- a/js/src/ls/ls-readable-stream.js +++ b/js/src/ls/ls-readable-stream.js @@ -9,7 +9,6 @@ const expect = chai.expect chai.use(dirtyChai) const loadFixture = require('aegir/fixtures') const concat = require('concat-stream') -const isNode = require('detect-node') const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { @@ -56,9 +55,6 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) it('should readable stream ls with a base58 encoded CID', function (done) { - // TODO: https://github.com/ipfs/js-ipfs-api/issues/339 - if (!isNode) { this.skip() } - const content = (name) => ({ path: `test-folder/${name}`, content: directory.files[name] diff --git a/js/src/ls/ls.js b/js/src/ls/ls.js index f866eac0c..e8c939e33 100644 --- a/js/src/ls/ls.js +++ b/js/src/ls/ls.js @@ -8,7 +8,6 @@ const dirtyChai = require('dirty-chai') const expect = chai.expect chai.use(dirtyChai) const loadFixture = require('aegir/fixtures') -const isNode = require('detect-node') const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { @@ -51,9 +50,6 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) it('should ls with a base58 encoded CID', function (done) { - // TODO: https://github.com/ipfs/js-ipfs-api/issues/339 - if (!isNode) { this.skip() } - const content = (name) => ({ path: `test-folder/${name}`, content: directory.files[name] diff --git a/package.json b/package.json index 82d0714b6..eb2188d89 100644 --- a/package.json +++ b/package.json @@ -38,7 +38,6 @@ "chai": "^4.1.2", "cids": "~0.5.3", "concat-stream": "^1.6.2", - 
"detect-node": "^2.0.3", "dirty-chai": "^2.0.1", "hat": "0.0.3", "ipfs-block": "~0.7.1", From 850bf6100da5fc5244fb87f69171a9c66ec62849 Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Wed, 6 Jun 2018 16:42:38 +0100 Subject: [PATCH 13/41] feat(key): modularised key License: MIT Signed-off-by: Alan Shaw --- js/src/key.js | 187 ------------------------------------------- js/src/key/export.js | 46 +++++++++++ js/src/key/gen.js | 56 +++++++++++++ js/src/key/import.js | 55 +++++++++++++ js/src/key/index.js | 13 +++ js/src/key/list.js | 63 +++++++++++++++ js/src/key/rename.js | 64 +++++++++++++++ js/src/key/rm.js | 59 ++++++++++++++ 8 files changed, 356 insertions(+), 187 deletions(-) delete mode 100644 js/src/key.js create mode 100644 js/src/key/export.js create mode 100644 js/src/key/gen.js create mode 100644 js/src/key/import.js create mode 100644 js/src/key/index.js create mode 100644 js/src/key/list.js create mode 100644 js/src/key/rename.js create mode 100644 js/src/key/rm.js diff --git a/js/src/key.js b/js/src/key.js deleted file mode 100644 index 11c11993e..000000000 --- a/js/src/key.js +++ /dev/null @@ -1,187 +0,0 @@ -/* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - -'use strict' - -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) -const hat = require('hat') -const { spawnNodeWithId } = require('./utils/spawn') - -module.exports = (common) => { - describe('.key', () => { - const keyTypes = [ - {type: 'rsa', size: 2048} - ] - const keys = [] - let ipfs - let withGo - - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - spawnNodeWithId(factory, (err, node) => { - expect(err).to.not.exist() - ipfs = node - withGo = node.peerId.agentVersion.startsWith('go-ipfs') - done() - }) - }) - }) - - after((done) 
=> common.teardown(done)) - - describe('.gen', () => { - keyTypes.forEach((kt) => { - it(`creates a new ${kt.type} key`, function (done) { - this.timeout(20 * 1000) - const name = hat() - ipfs.key.gen(name, kt, (err, key) => { - expect(err).to.not.exist() - expect(key).to.exist() - expect(key).to.have.property('name', name) - expect(key).to.have.property('id') - keys.push(key) - done() - }) - }) - }) - }) - - describe('.list', () => { - let listedKeys - it('lists all the keys', (done) => { - ipfs.key.list((err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - expect(res).to.be.an('array') - expect(res.length).to.be.above(keys.length - 1) - listedKeys = res - done() - }) - }) - - it('contains the created keys', () => { - keys.forEach(ki => { - const found = listedKeys.filter(lk => ki.name === lk.name && ki.id === lk.id) - expect(found).to.have.length(1) - }) - }) - }) - - describe('.rename', () => { - let oldName - let newName - - before(() => { - oldName = keys[0].name - newName = 'x' + oldName - }) - - it('renames a key', (done) => { - ipfs.key.rename(oldName, newName, (err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - expect(res).to.have.property('was', oldName) - expect(res).to.have.property('now', newName) - expect(res).to.have.property('id', keys[0].id) - keys[0].name = newName - done() - }) - }) - - it('contains the new name', (done) => { - ipfs.key.list((err, res) => { - expect(err).to.not.exist() - const found = res.filter(k => k.name === newName) - expect(found).to.have.length(1) - done() - }) - }) - - it('does not contain the old name', (done) => { - ipfs.key.list((err, res) => { - expect(err).to.not.exist() - const found = res.filter(k => k.name === oldName) - expect(found).to.have.length(0) - done() - }) - }) - }) - - describe('.rm', () => { - let key - before(() => { - key = keys[0] - }) - - it('removes a key', function (done) { - ipfs.key.rm(key.name, (err, res) => { - expect(err).to.not.exist() - 
expect(res).to.exist() - expect(res).to.have.property('name', key.name) - expect(res).to.have.property('id', key.id) - done() - }) - }) - - it('does not contain the removed name', (done) => { - ipfs.key.list((err, res) => { - expect(err).to.not.exist() - const found = res.filter(k => k.name === key.name) - expect(found).to.have.length(0) - done() - }) - }) - }) - - describe('exchange', () => { - let selfPem - let passwordPem = hat() - - it('exports', function (done) { - if (withGo) { - console.log('Not supported by go-ipfs yet') - this.skip() - } - ipfs.key.export('self', passwordPem, (err, pem) => { - expect(err).to.not.exist() - expect(pem).to.exist() - selfPem = pem - done() - }) - }) - - it('imports', function (done) { - if (withGo) { - console.log('Not supported by go-ipfs yet') - this.skip() - } - ipfs.key.import('clone', selfPem, passwordPem, (err, key) => { - expect(err).to.not.exist() - expect(key).to.exist() - expect(key).to.have.property('name', 'clone') - expect(key).to.have.property('id') - done() - }) - }) - - it('removes', function (done) { - if (withGo) { - console.log('Not supported by go-ipfs yet') - this.skip() - } - ipfs.key.rm('clone', (err) => { - expect(err).to.not.exist() - done() - }) - }) - }) - }) -} diff --git a/js/src/key/export.js b/js/src/key/export.js new file mode 100644 index 000000000..a21d66666 --- /dev/null +++ b/js/src/key/export.js @@ -0,0 +1,46 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const hat = require('hat') +const { getDescribe, getIt } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.key.export', () => { + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to 
increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should export "self" key', function (done) { + ipfs.key.export('self', hat(), (err, pem) => { + expect(err).to.not.exist() + expect(pem).to.exist() + done() + }) + }) + }) +} diff --git a/js/src/key/gen.js b/js/src/key/gen.js new file mode 100644 index 000000000..3da9961c1 --- /dev/null +++ b/js/src/key/gen.js @@ -0,0 +1,56 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const hat = require('hat') +const { getDescribe, getIt } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.key.gen', () => { + const keyTypes = [ + { type: 'rsa', size: 2048 } + ] + + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + keyTypes.forEach((kt) => { + it(`should generate a new ${kt.type} key`, function (done) { + this.timeout(20 * 1000) + const name = hat() + ipfs.key.gen(name, kt, (err, key) => { + expect(err).to.not.exist() + expect(key).to.exist() + expect(key).to.have.property('name', name) + expect(key).to.have.property('id') + done() + }) + }) + }) + }) +} diff --git a/js/src/key/import.js b/js/src/key/import.js new file mode 100644 index 000000000..863d4c84d 
--- /dev/null +++ b/js/src/key/import.js @@ -0,0 +1,55 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const hat = require('hat') +const { getDescribe, getIt } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.key.import', () => { + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should import an exported key', (done) => { + const password = hat() + + ipfs.key.export('self', password, (err, pem) => { + expect(err).to.not.exist() + expect(pem).to.exist() + + ipfs.key.import('clone', pem, password, (err, key) => { + expect(err).to.not.exist() + expect(key).to.exist() + expect(key).to.have.property('name', 'clone') + expect(key).to.have.property('id') + done() + }) + }) + }) + }) +} diff --git a/js/src/key/index.js b/js/src/key/index.js new file mode 100644 index 000000000..cbe2aaa35 --- /dev/null +++ b/js/src/key/index.js @@ -0,0 +1,13 @@ +'use strict' +const { createSuite } = require('../utils/suite') + +const tests = { + gen: require('./gen'), + list: require('./list'), + rename: require('./rename'), + rm: require('./rm'), + export: require('./export'), + import: require('./import') +} + +module.exports = createSuite(tests) diff --git a/js/src/key/list.js b/js/src/key/list.js new file mode 100644 index 000000000..6948a0729 --- /dev/null +++ b/js/src/key/list.js @@ -0,0 +1,63 @@ +/* eslint-env mocha */ +/* eslint 
max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const times = require('async/times') +const expect = chai.expect +chai.use(dirtyChai) +const hat = require('hat') +const { getDescribe, getIt } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.key.list', () => { + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should list all the keys', function (done) { + this.timeout(60 * 1000) + + times(3, (n, cb) => { + ipfs.key.gen(hat(), { type: 'rsa', size: 2048 }, cb) + }, (err, keys) => { + expect(err).to.not.exist() + + ipfs.key.list((err, res) => { + expect(err).to.not.exist() + expect(res).to.exist() + expect(res).to.be.an('array') + expect(res.length).to.be.above(keys.length - 1) + + keys.forEach(key => { + const found = res.find(({ id, name }) => name === key.name && id === key.id) + expect(found).to.exist() + }) + + done() + }) + }) + }) + }) +} diff --git a/js/src/key/rename.js b/js/src/key/rename.js new file mode 100644 index 000000000..f352f1cea --- /dev/null +++ b/js/src/key/rename.js @@ -0,0 +1,64 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const hat = require('hat') +const { getDescribe, getIt } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) 
+ const common = createCommon() + + describe('.key.rename', () => { + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should rename a key', function (done) { + this.timeout(30 * 1000) + + const oldName = hat() + const newName = hat() + + ipfs.key.gen(oldName, { type: 'rsa', size: 2048 }, (err, key) => { + expect(err).to.not.exist() + + ipfs.key.rename(oldName, newName, (err, res) => { + expect(err).to.not.exist() + expect(res).to.exist() + expect(res).to.have.property('was', oldName) + expect(res).to.have.property('now', newName) + expect(res).to.have.property('id', key.id) + + ipfs.key.list((err, res) => { + expect(err).to.not.exist() + expect(res.find(k => k.name === newName)).to.exist() + expect(res.find(k => k.name === oldName)).to.not.exist() + done() + }) + }) + }) + }) + }) +} diff --git a/js/src/key/rm.js b/js/src/key/rm.js new file mode 100644 index 000000000..864065efc --- /dev/null +++ b/js/src/key/rm.js @@ -0,0 +1,59 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const hat = require('hat') +const { getDescribe, getIt } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.key.rm', () => { + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + 
expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should rm a key', function (done) { + this.timeout(30 * 1000) + + ipfs.key.gen(hat(), { type: 'rsa', size: 2048 }, (err, key) => { + expect(err).to.not.exist() + + ipfs.key.rm(key.name, (err, res) => { + expect(err).to.not.exist() + expect(res).to.exist() + expect(res).to.have.property('name', key.name) + expect(res).to.have.property('id', key.id) + + ipfs.key.list((err, res) => { + expect(err).to.not.exist() + expect(res.find(k => k.name === key.name)).to.not.exist() + done() + }) + }) + }) + }) + }) +} From dd0cb8ec8ceb52bbd54e4ef6009b0ac088b56714 Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Wed, 6 Jun 2018 17:01:10 +0100 Subject: [PATCH 14/41] feat: modularises miscellaneous License: MIT Signed-off-by: Alan Shaw --- js/src/miscellaneous.js | 100 -------------------------------- js/src/miscellaneous/dns.js | 49 ++++++++++++++++ js/src/miscellaneous/id.js | 56 ++++++++++++++++++ js/src/miscellaneous/index.js | 11 ++++ js/src/miscellaneous/stop.js | 55 ++++++++++++++++++ js/src/miscellaneous/version.js | 58 ++++++++++++++++++ 6 files changed, 229 insertions(+), 100 deletions(-) delete mode 100644 js/src/miscellaneous.js create mode 100644 js/src/miscellaneous/dns.js create mode 100644 js/src/miscellaneous/id.js create mode 100644 js/src/miscellaneous/index.js create mode 100644 js/src/miscellaneous/stop.js create mode 100644 js/src/miscellaneous/version.js diff --git a/js/src/miscellaneous.js b/js/src/miscellaneous.js deleted file mode 100644 index 7c3030435..000000000 --- a/js/src/miscellaneous.js +++ /dev/null @@ -1,100 +0,0 @@ -/* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - -'use strict' - -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) -const { spawnNodeWithId } = 
require('./utils/spawn') - -module.exports = (common) => { - describe('.miscellaneous', () => { - let ipfs - let withGo - - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - spawnNodeWithId(factory, (err, node) => { - expect(err).to.not.exist() - ipfs = node - withGo = node.peerId.agentVersion.startsWith('go-ipfs') - done() - }) - }) - }) - - after((done) => { - common.teardown(done) - }) - - it('.id', (done) => { - ipfs.id((err, res) => { - expect(err).to.not.exist() - expect(res).to.have.a.property('id') - expect(res).to.have.a.property('publicKey') - done() - }) - }) - - it('.version', (done) => { - ipfs.version((err, result) => { - expect(err).to.not.exist() - expect(result).to.have.a.property('version') - expect(result).to.have.a.property('commit') - expect(result).to.have.a.property('repo') - done() - }) - }) - - it('.dns', function (done) { - this.timeout(20 * 1000) - - ipfs.dns('ipfs.io', (err, path) => { - expect(err).to.not.exist() - expect(path).to.exist() - done() - }) - }) - - it('.id Promises support', () => { - return ipfs.id() - .then((res) => { - expect(res).to.have.a.property('id') - expect(res).to.have.a.property('publicKey') - }) - }) - - it('.version Promises support', () => { - return ipfs.version() - .then((result) => { - expect(result).to.have.a.property('version') - expect(result).to.have.a.property('commit') - expect(result).to.have.a.property('repo') - }) - }) - - // must be last test to run - it('.stop', function (done) { - this.timeout(10 * 1000) - ipfs.stop((err) => { - // TODO: go-ipfs returns an error, https://github.com/ipfs/go-ipfs/issues/4078 - if (!withGo) { - expect(err).to.not.exist() - } - // Trying to stop an already stopped node should return an error - // as the node can't respond to requests anymore - ipfs.stop((err) => { - expect(err).to.exist() - 
done() - }) - }) - }) - }) -} diff --git a/js/src/miscellaneous/dns.js b/js/src/miscellaneous/dns.js new file mode 100644 index 000000000..1851a9d2c --- /dev/null +++ b/js/src/miscellaneous/dns.js @@ -0,0 +1,49 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const { getDescribe, getIt } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.dns', () => { + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => { + common.teardown(done) + }) + + it('should resolve a DNS link', function (done) { + this.timeout(20 * 1000) + + ipfs.dns('ipfs.io', (err, path) => { + expect(err).to.not.exist() + expect(path).to.exist() + done() + }) + }) + }) +} diff --git a/js/src/miscellaneous/id.js b/js/src/miscellaneous/id.js new file mode 100644 index 000000000..d60114407 --- /dev/null +++ b/js/src/miscellaneous/id.js @@ -0,0 +1,56 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const { getDescribe, getIt } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.id', () => { + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase 
the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => { + common.teardown(done) + }) + + it('should get the node ID', (done) => { + ipfs.id((err, res) => { + expect(err).to.not.exist() + expect(res).to.have.a.property('id') + expect(res).to.have.a.property('publicKey') + done() + }) + }) + + it('should get the node ID (promised)', () => { + return ipfs.id() + .then((res) => { + expect(res).to.have.a.property('id') + expect(res).to.have.a.property('publicKey') + }) + }) + }) +} diff --git a/js/src/miscellaneous/index.js b/js/src/miscellaneous/index.js new file mode 100644 index 000000000..b3a5dc603 --- /dev/null +++ b/js/src/miscellaneous/index.js @@ -0,0 +1,11 @@ +'use strict' +const { createSuite } = require('../utils/suite') + +const tests = { + id: require('./id'), + version: require('./version'), + dns: require('./dns'), + stop: require('./stop') +} + +module.exports = createSuite(tests) diff --git a/js/src/miscellaneous/stop.js b/js/src/miscellaneous/stop.js new file mode 100644 index 000000000..a2982fbbe --- /dev/null +++ b/js/src/miscellaneous/stop.js @@ -0,0 +1,55 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const { getDescribe, getIt } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.stop', () => { + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + 
factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => { + common.teardown(done) + }) + + // must be last test to run + it('should stop the node', function (done) { + this.timeout(10 * 1000) + + ipfs.stop((err) => { + expect(err).to.not.exist() + + // Trying to stop an already stopped node should return an error + // as the node can't respond to requests anymore + ipfs.stop((err) => { + expect(err).to.exist() + done() + }) + }) + }) + }) +} diff --git a/js/src/miscellaneous/version.js b/js/src/miscellaneous/version.js new file mode 100644 index 000000000..3d7f06e7a --- /dev/null +++ b/js/src/miscellaneous/version.js @@ -0,0 +1,58 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const { getDescribe, getIt } = require('../utils/mocha') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.version', () => { + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => { + common.teardown(done) + }) + + it('should get the node version', (done) => { + ipfs.version((err, result) => { + expect(err).to.not.exist() + expect(result).to.have.a.property('version') + expect(result).to.have.a.property('commit') + expect(result).to.have.a.property('repo') + done() + }) + }) + + it('should get the node version (promised)', () => { + return ipfs.version() + .then((result) => { + expect(result).to.have.a.property('version') + 
expect(result).to.have.a.property('commit') + expect(result).to.have.a.property('repo') + }) + }) + }) +} From f861431c72756826ca38261a6ed86bc2334b1ade Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Wed, 6 Jun 2018 23:13:53 +0100 Subject: [PATCH 15/41] feat: modularised ping License: MIT Signed-off-by: Alan Shaw --- js/src/ping.js | 236 ---------------------------- js/src/ping/index.js | 10 ++ js/src/ping/ping-pull-stream.js | 107 +++++++++++++ js/src/ping/ping-readable-stream.js | 127 +++++++++++++++ js/src/ping/ping.js | 81 ++++++++++ js/src/ping/utils.js | 23 +++ 6 files changed, 348 insertions(+), 236 deletions(-) delete mode 100644 js/src/ping.js create mode 100644 js/src/ping/index.js create mode 100644 js/src/ping/ping-pull-stream.js create mode 100644 js/src/ping/ping-readable-stream.js create mode 100644 js/src/ping/ping.js create mode 100644 js/src/ping/utils.js diff --git a/js/src/ping.js b/js/src/ping.js deleted file mode 100644 index de88498dd..000000000 --- a/js/src/ping.js +++ /dev/null @@ -1,236 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const pull = require('pull-stream') -const pump = require('pump') -const { Writable } = require('stream') -const series = require('async/series') -const { spawnNodesWithId } = require('./utils/spawn') - -const expect = chai.expect -chai.use(dirtyChai) - -function expectIsPingResponse (obj) { - expect(obj).to.have.a.property('success') - expect(obj).to.have.a.property('time') - expect(obj).to.have.a.property('text') - expect(obj.success).to.be.a('boolean') - expect(obj.time).to.be.a('number') - expect(obj.text).to.be.a('string') -} - -// Determine if a ping response object is a pong, or something else, like a status message -function isPong (pingResponse) { - return Boolean(pingResponse && pingResponse.success && !pingResponse.text) -} - -module.exports = (common) => { - describe('.ping', function () { - let ipfsA - let ipfsB - - 
before(function (done) { - this.timeout(60 * 1000) - - common.setup((err, factory) => { - if (err) return done(err) - - series([ - (cb) => { - spawnNodesWithId(2, factory, (err, nodes) => { - if (err) return cb(err) - ipfsA = nodes[0] - ipfsB = nodes[1] - cb() - }) - }, - (cb) => ipfsA.swarm.connect(ipfsB.peerId.addresses[0], cb) - ], done) - }) - }) - - after((done) => common.teardown(done)) - - describe('.ping', function () { - this.timeout(15 * 1000) - - it('sends the specified number of packets', (done) => { - const count = 3 - ipfsA.ping(ipfsB.peerId.id, { count }, (err, responses) => { - expect(err).to.not.exist() - responses.forEach(expectIsPingResponse) - const pongs = responses.filter(isPong) - expect(pongs.length).to.equal(count) - done() - }) - }) - - it('fails when pinging an unknown peer', (done) => { - const unknownPeerId = 'QmUmaEnH1uMmvckMZbh3yShaasvELPW4ZLPWnB4entMTEn' - const count = 2 - - ipfsA.ping(unknownPeerId, { count }, (err, responses) => { - expect(err).to.exist() - expect(responses[0].text).to.include('Looking up') - expect(responses[1].success).to.be.false() - done() - }) - }) - - it('fails when pinging an invalid peer', (done) => { - const invalidPeerId = 'not a peer ID' - const count = 2 - ipfsA.ping(invalidPeerId, { count }, (err, responses) => { - expect(err).to.exist() - expect(err.message).to.include('failed to parse peer address') - done() - }) - }) - }) - - describe('.pingPullStream', function () { - this.timeout(15 * 1000) - - it('sends the specified number of packets', (done) => { - let packetNum = 0 - const count = 3 - pull( - ipfsA.pingPullStream(ipfsB.peerId.id, { count }), - pull.drain((res) => { - expect(res.success).to.be.true() - // It's a pong - if (isPong(res)) { - packetNum++ - } - }, (err) => { - expect(err).to.not.exist() - expect(packetNum).to.equal(count) - done() - }) - ) - }) - - it('fails when pinging an unknown peer', (done) => { - let messageNum = 0 - const unknownPeerId = 
'QmUmaEnH1uMmvckMZbh3yShaasvELPW4ZLPWnB4entMTEn' - const count = 2 - pull( - ipfsA.pingPullStream(unknownPeerId, { count }), - pull.drain((res) => { - expectIsPingResponse(res) - messageNum++ - - // First message should be "looking up" response - if (messageNum === 1) { - expect(res.text).to.include('Looking up') - } - - // Second message should be a failure response - if (messageNum === 2) { - expect(res.success).to.be.false() - } - }, (err) => { - expect(err).to.exist() - done() - }) - ) - }) - - it('fails when pinging an invalid peer', (done) => { - const invalidPeerId = 'not a peer ID' - const count = 2 - pull( - ipfsA.pingPullStream(invalidPeerId, { count }), - pull.collect((err) => { - expect(err).to.exist() - expect(err.message).to.include('failed to parse peer address') - done() - }) - ) - }) - }) - - describe('.pingReadableStream', function () { - this.timeout(15 * 1000) - - it('sends the specified number of packets', (done) => { - let packetNum = 0 - const count = 3 - - pump( - ipfsA.pingReadableStream(ipfsB.peerId.id, { count }), - new Writable({ - objectMode: true, - write (res, enc, cb) { - expect(res.success).to.be.true() - // It's a pong - if (isPong(res)) { - packetNum++ - } - - cb() - } - }), - (err) => { - expect(err).to.not.exist() - expect(packetNum).to.equal(count) - done() - } - ) - }) - - it('fails when pinging an unknown peer', (done) => { - let messageNum = 0 - const unknownPeerId = 'QmUmaEnH1uMmvckMZbh3yShaasvELPW4ZLPWnB4entMTEn' - const count = 2 - - pump( - ipfsA.pingReadableStream(unknownPeerId, { count }), - new Writable({ - objectMode: true, - write (res, enc, cb) { - expectIsPingResponse(res) - messageNum++ - - // First message should be "looking up" response - if (messageNum === 1) { - expect(res.text).to.include('Looking up') - } - - // Second message should be a failure response - if (messageNum === 2) { - expect(res.success).to.be.false() - } - - cb() - } - }), - (err) => { - expect(err).to.exist() - done() - } - ) - }) - - 
it('fails when pinging an invalid peer', (done) => { - const invalidPeerId = 'not a peer ID' - const count = 2 - - pump( - ipfsA.pingReadableStream(invalidPeerId, { count }), - new Writable({ - objectMode: true, - write: (chunk, enc, cb) => cb() - }), - (err) => { - expect(err).to.exist() - expect(err.message).to.include('failed to parse peer address') - done() - } - ) - }) - }) - }) -} diff --git a/js/src/ping/index.js b/js/src/ping/index.js new file mode 100644 index 000000000..a33bbddc1 --- /dev/null +++ b/js/src/ping/index.js @@ -0,0 +1,10 @@ +'use strict' +const { createSuite } = require('../utils/suite') + +const tests = { + ping: require('./ping'), + pingPullStream: require('./ping-pull-stream'), + pingReadableStream: require('./ping-readable-stream') +} + +module.exports = createSuite(tests) diff --git a/js/src/ping/ping-pull-stream.js b/js/src/ping/ping-pull-stream.js new file mode 100644 index 000000000..0fe689dad --- /dev/null +++ b/js/src/ping/ping-pull-stream.js @@ -0,0 +1,107 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const pull = require('pull-stream') +const series = require('async/series') +const { spawnNodesWithId } = require('../utils/spawn') +const { waitUntilConnected } = require('../utils/connections') +const { getDescribe, getIt } = require('../utils/mocha') +const { expectIsPingResponse, isPong } = require('./utils') + +const expect = chai.expect +chai.use(dirtyChai) + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.pingPullStream', function () { + this.timeout(15 * 1000) + + let ipfsdA + let ipfsdB + + before(function (done) { + this.timeout(60 * 1000) + + common.setup((err, factory) => { + if (err) return done(err) + + series([ + (cb) => { + spawnNodesWithId(2, factory, (err, nodes) => { + if (err) return cb(err) + ipfsdA = nodes[0] + ipfsdB = nodes[1] + 
cb() + }) + }, + (cb) => waitUntilConnected(ipfsdA, ipfsdB, cb) + ], done) + }) + }) + + after((done) => common.teardown(done)) + + it('should send the specified number of packets over pull stream', (done) => { + let packetNum = 0 + const count = 3 + pull( + ipfsdA.pingPullStream(ipfsdB.peerId.id, { count }), + pull.drain((res) => { + expect(res.success).to.be.true() + // It's a pong + if (isPong(res)) { + packetNum++ + } + }, (err) => { + expect(err).to.not.exist() + expect(packetNum).to.equal(count) + done() + }) + ) + }) + + it('should fail when pinging an unknown peer over pull stream', (done) => { + let messageNum = 0 + const unknownPeerId = 'QmUmaEnH1uMmvckMZbh3yShaasvELPW4ZLPWnB4entMTEn' + const count = 2 + pull( + ipfsdA.pingPullStream(unknownPeerId, { count }), + pull.drain((res) => { + expectIsPingResponse(res) + messageNum++ + + // First message should be "looking up" response + if (messageNum === 1) { + expect(res.text).to.include('Looking up') + } + + // Second message should be a failure response + if (messageNum === 2) { + expect(res.success).to.be.false() + } + }, (err) => { + expect(err).to.exist() + done() + }) + ) + }) + + it('should fail when pinging an invalid peer over pull stream', (done) => { + const invalidPeerId = 'not a peer ID' + const count = 2 + pull( + ipfsdA.pingPullStream(invalidPeerId, { count }), + pull.collect((err) => { + expect(err).to.exist() + expect(err.message).to.include('failed to parse peer address') + done() + }) + ) + }) + }) +} diff --git a/js/src/ping/ping-readable-stream.js b/js/src/ping/ping-readable-stream.js new file mode 100644 index 000000000..df0a67e83 --- /dev/null +++ b/js/src/ping/ping-readable-stream.js @@ -0,0 +1,127 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const pump = require('pump') +const { Writable } = require('stream') +const series = require('async/series') +const { spawnNodesWithId } = require('../utils/spawn') +const { 
waitUntilConnected } = require('../utils/connections') +const { getDescribe, getIt } = require('../utils/mocha') +const { expectIsPingResponse, isPong } = require('./utils') + +const expect = chai.expect +chai.use(dirtyChai) + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.pingReadableStream', function () { + this.timeout(15 * 1000) + + let ipfsdA + let ipfsdB + + before(function (done) { + this.timeout(60 * 1000) + + common.setup((err, factory) => { + if (err) return done(err) + + series([ + (cb) => { + spawnNodesWithId(2, factory, (err, nodes) => { + if (err) return cb(err) + ipfsdA = nodes[0] + ipfsdB = nodes[1] + cb() + }) + }, + (cb) => waitUntilConnected(ipfsdA, ipfsdB, cb) + ], done) + }) + }) + + after((done) => common.teardown(done)) + + it('should send the specified number of packets over readable stream', (done) => { + let packetNum = 0 + const count = 3 + + pump( + ipfsdA.pingReadableStream(ipfsdB.peerId.id, { count }), + new Writable({ + objectMode: true, + write (res, enc, cb) { + expect(res.success).to.be.true() + // It's a pong + if (isPong(res)) { + packetNum++ + } + + cb() + } + }), + (err) => { + expect(err).to.not.exist() + expect(packetNum).to.equal(count) + done() + } + ) + }) + + it('should fail when pinging an unknown peer over readable stream', (done) => { + let messageNum = 0 + const unknownPeerId = 'QmUmaEnH1uMmvckMZbh3yShaasvELPW4ZLPWnB4entMTEn' + const count = 2 + + pump( + ipfsdA.pingReadableStream(unknownPeerId, { count }), + new Writable({ + objectMode: true, + write (res, enc, cb) { + expectIsPingResponse(res) + messageNum++ + + // First message should be "looking up" response + if (messageNum === 1) { + expect(res.text).to.include('Looking up') + } + + // Second message should be a failure response + if (messageNum === 2) { + expect(res.success).to.be.false() + } + + cb() + } + }), + (err) => { + expect(err).to.exist() 
+ done() + } + ) + }) + + it('should fail when pinging an invalid peer over readable stream', (done) => { + const invalidPeerId = 'not a peer ID' + const count = 2 + + pump( + ipfsdA.pingReadableStream(invalidPeerId, { count }), + new Writable({ + objectMode: true, + write: (chunk, enc, cb) => cb() + }), + (err) => { + expect(err).to.exist() + expect(err.message).to.include('failed to parse peer address') + done() + } + ) + }) + }) +} diff --git a/js/src/ping/ping.js b/js/src/ping/ping.js new file mode 100644 index 000000000..78d495b94 --- /dev/null +++ b/js/src/ping/ping.js @@ -0,0 +1,81 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const series = require('async/series') +const { spawnNodesWithId } = require('../utils/spawn') +const { waitUntilConnected } = require('../utils/connections') +const { getDescribe, getIt } = require('../utils/mocha') +const { expectIsPingResponse, isPong } = require('./utils') + +const expect = chai.expect +chai.use(dirtyChai) + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.ping', function () { + this.timeout(15 * 1000) + + let ipfsdA + let ipfsdB + + before(function (done) { + this.timeout(60 * 1000) + + common.setup((err, factory) => { + if (err) return done(err) + + series([ + (cb) => { + spawnNodesWithId(2, factory, (err, nodes) => { + if (err) return cb(err) + ipfsdA = nodes[0] + ipfsdB = nodes[1] + cb() + }) + }, + (cb) => waitUntilConnected(ipfsdA, ipfsdB, cb) + ], done) + }) + }) + + after((done) => common.teardown(done)) + + it('should send the specified number of packets', (done) => { + const count = 3 + ipfsdA.ping(ipfsdB.peerId.id, { count }, (err, responses) => { + expect(err).to.not.exist() + responses.forEach(expectIsPingResponse) + const pongs = responses.filter(isPong) + expect(pongs.length).to.equal(count) + done() + }) + }) + + 
it('should fail when pinging an unknown peer', (done) => { + const unknownPeerId = 'QmUmaEnH1uMmvckMZbh3yShaasvELPW4ZLPWnB4entMTEn' + const count = 2 + + ipfsdA.ping(unknownPeerId, { count }, (err, responses) => { + expect(err).to.exist() + expect(responses[0].text).to.include('Looking up') + expect(responses[1].success).to.be.false() + done() + }) + }) + + it('should fail when pinging an invalid peer', (done) => { + const invalidPeerId = 'not a peer ID' + const count = 2 + ipfsdA.ping(invalidPeerId, { count }, (err, responses) => { + expect(err).to.exist() + expect(err.message).to.include('failed to parse peer address') + done() + }) + }) + }) +} diff --git a/js/src/ping/utils.js b/js/src/ping/utils.js new file mode 100644 index 000000000..beae05769 --- /dev/null +++ b/js/src/ping/utils.js @@ -0,0 +1,23 @@ +const chai = require('chai') +const dirtyChai = require('dirty-chai') + +const expect = chai.expect +chai.use(dirtyChai) + +function expectIsPingResponse (obj) { + expect(obj).to.have.a.property('success') + expect(obj).to.have.a.property('time') + expect(obj).to.have.a.property('text') + expect(obj.success).to.be.a('boolean') + expect(obj.time).to.be.a('number') + expect(obj.text).to.be.a('string') +} + +exports.expectIsPingResponse = expectIsPingResponse + +// Determine if a ping response object is a pong, or something else, like a status message +function isPong (pingResponse) { + return Boolean(pingResponse && pingResponse.success && !pingResponse.text) +} + +exports.isPong = isPong From dd288ec3df9b5c0a15571d7c115271cabbd17cee Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Thu, 7 Jun 2018 10:39:58 +0100 Subject: [PATCH 16/41] feat: modularised repo and stats License: MIT Signed-off-by: Alan Shaw --- js/src/repo/gc.js | 52 ++++++++++++ js/src/repo/index.js | 10 +++ js/src/repo/stat.js | 52 ++++++++++++ js/src/{repo.js => repo/version.js} | 46 +++-------- js/src/stats.js | 97 ----------------------- js/src/stats/bitswap.js | 52 ++++++++++++ 
js/src/stats/bw-pull-stream.js | 52 ++++++++++++ js/src/stats/bw-readable-stream.js | 49 ++++++++++++ js/src/stats/bw.js | 52 ++++++++++++ js/src/stats/index.js | 12 +++ js/src/stats/repo.js | 52 ++++++++++++ js/src/{utils/stats.js => stats/utils.js} | 6 +- 12 files changed, 397 insertions(+), 135 deletions(-) create mode 100644 js/src/repo/gc.js create mode 100644 js/src/repo/index.js create mode 100644 js/src/repo/stat.js rename js/src/{repo.js => repo/version.js} (52%) delete mode 100644 js/src/stats.js create mode 100644 js/src/stats/bitswap.js create mode 100644 js/src/stats/bw-pull-stream.js create mode 100644 js/src/stats/bw-readable-stream.js create mode 100644 js/src/stats/bw.js create mode 100644 js/src/stats/index.js create mode 100644 js/src/stats/repo.js rename js/src/{utils/stats.js => stats/utils.js} (93%) diff --git a/js/src/repo/gc.js b/js/src/repo/gc.js new file mode 100644 index 000000000..2fd1b74dd --- /dev/null +++ b/js/src/repo/gc.js @@ -0,0 +1,52 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const { getDescribe, getIt } = require('../utils/mocha') + +const expect = chai.expect +chai.use(dirtyChai) + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.repo.gc', () => { + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should run garbage collection', (done) => { + ipfs.repo.gc((err, res) => { + expect(err).to.not.exist() + expect(res).to.exist() + done() + }) + }) + + it('should 
run garbage collection (promised)', () => { + return ipfs.repo.gc().then((res) => { + expect(res).to.exist() + }) + }) + }) +} diff --git a/js/src/repo/index.js b/js/src/repo/index.js new file mode 100644 index 000000000..f8daaea23 --- /dev/null +++ b/js/src/repo/index.js @@ -0,0 +1,10 @@ +'use strict' +const { createSuite } = require('../utils/suite') + +const tests = { + version: require('./version'), + stat: require('./stat'), + gc: require('./gc') +} + +module.exports = createSuite(tests) diff --git a/js/src/repo/stat.js b/js/src/repo/stat.js new file mode 100644 index 000000000..b8bc85dda --- /dev/null +++ b/js/src/repo/stat.js @@ -0,0 +1,52 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const { expectIsRepo } = require('../stats/utils') +const { getDescribe, getIt } = require('../utils/mocha') + +const expect = chai.expect +chai.use(dirtyChai) + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.repo.stat', () => { + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should get repo stats', (done) => { + ipfs.repo.stat((err, res) => { + expectIsRepo(err, res) + done() + }) + }) + + it('should get repo stats (promised)', () => { + return ipfs.repo.stat().then((res) => { + expectIsRepo(null, res) + }) + }) + }) +} diff --git a/js/src/repo.js b/js/src/repo/version.js similarity index 52% rename from js/src/repo.js rename to js/src/repo/version.js index db143a268..aa6ebd4ee 100644 --- a/js/src/repo.js 
+++ b/js/src/repo/version.js @@ -5,12 +5,17 @@ const chai = require('chai') const dirtyChai = require('dirty-chai') -const statsTests = require('./utils/stats') +const { getDescribe, getIt } = require('../utils/mocha') + const expect = chai.expect chai.use(dirtyChai) -module.exports = (common) => { - describe('.repo', () => { +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.repo.version', () => { let ipfs before(function (done) { @@ -28,11 +33,9 @@ module.exports = (common) => { }) }) - after((done) => { - common.teardown(done) - }) + after((done) => common.teardown(done)) - it('.version', (done) => { + it('should get the repo version', (done) => { ipfs.repo.version((err, version) => { expect(err).to.not.exist() expect(version).to.exist() @@ -40,37 +43,10 @@ module.exports = (common) => { }) }) - it('.version Promise', () => { + it('should get the repo version (promised)', () => { return ipfs.repo.version().then((version) => { expect(version).to.exist() }) }) - - it('.stat', (done) => { - ipfs.repo.stat((err, res) => { - statsTests.expectIsRepo(err, res) - done() - }) - }) - - it('.stat Promise', () => { - return ipfs.repo.stat().then((res) => { - statsTests.expectIsRepo(null, res) - }) - }) - - it('.gc', (done) => { - ipfs.repo.gc((err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - done() - }) - }) - - it('.gc Promise', () => { - return ipfs.repo.gc().then((res) => { - expect(res).to.exist() - }) - }) }) } diff --git a/js/src/stats.js b/js/src/stats.js deleted file mode 100644 index 83441e024..000000000 --- a/js/src/stats.js +++ /dev/null @@ -1,97 +0,0 @@ -/* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - -'use strict' - -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const statsTests = require('./utils/stats') -const expect = chai.expect -const pull = require('pull-stream') 
-chai.use(dirtyChai) - -module.exports = (common) => { - describe('.stats', () => { - let ipfs - - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) - }) - - after((done) => { - common.teardown(done) - }) - - it('.bitswap', (done) => { - ipfs.stats.bitswap((err, res) => { - statsTests.expectIsBitswap(err, res) - done() - }) - }) - - it('.bitswap Promise', () => { - return ipfs.stats.bitswap().then((res) => { - statsTests.expectIsBitswap(null, res) - }) - }) - - it('.bw', function (done) { - ipfs.stats.bw((err, res) => { - statsTests.expectIsBandwidth(err, res) - done() - }) - }) - - it('.bw Promise', () => { - return ipfs.stats.bw().then((res) => { - statsTests.expectIsBandwidth(null, res) - }) - }) - - it('.bwReadableStream', (done) => { - const stream = ipfs.stats.bwReadableStream() - - stream.once('data', (data) => { - statsTests.expectIsBandwidth(null, data) - stream.destroy() - done() - }) - }) - - it('.bwPullStream', (done) => { - const stream = ipfs.stats.bwPullStream() - - pull( - stream, - pull.collect((err, data) => { - statsTests.expectIsBandwidth(err, data[0]) - done() - }) - ) - }) - - it('.repo', (done) => { - ipfs.stats.repo((err, res) => { - statsTests.expectIsRepo(err, res) - done() - }) - }) - - it('.repo Promise', () => { - return ipfs.stats.repo().then((res) => { - statsTests.expectIsRepo(null, res) - }) - }) - }) -} diff --git a/js/src/stats/bitswap.js b/js/src/stats/bitswap.js new file mode 100644 index 000000000..d00d8b00d --- /dev/null +++ b/js/src/stats/bitswap.js @@ -0,0 +1,52 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const { getDescribe, getIt } = 
require('../utils/mocha') +const { expectIsBitswap } = require('./utils') + +const expect = chai.expect +chai.use(dirtyChai) + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.stats.bitswap', () => { + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should get bitswap stats', (done) => { + ipfs.stats.bitswap((err, res) => { + expectIsBitswap(err, res) + done() + }) + }) + + it('should get bitswap stats (promised)', () => { + return ipfs.stats.bitswap().then((res) => { + expectIsBitswap(null, res) + }) + }) + }) +} diff --git a/js/src/stats/bw-pull-stream.js b/js/src/stats/bw-pull-stream.js new file mode 100644 index 000000000..667337a31 --- /dev/null +++ b/js/src/stats/bw-pull-stream.js @@ -0,0 +1,52 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const { expectIsBandwidth } = require('./utils') +const pull = require('pull-stream') +const { getDescribe, getIt } = require('../utils/mocha') + +const expect = chai.expect +chai.use(dirtyChai) + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.stats.bwPullStream', () => { + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { 
+ expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should get bandwidth stats over pull stream', (done) => { + const stream = ipfs.stats.bwPullStream() + + pull( + stream, + pull.collect((err, data) => { + expectIsBandwidth(err, data[0]) + done() + }) + ) + }) + }) +} diff --git a/js/src/stats/bw-readable-stream.js b/js/src/stats/bw-readable-stream.js new file mode 100644 index 000000000..7eb05d496 --- /dev/null +++ b/js/src/stats/bw-readable-stream.js @@ -0,0 +1,49 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const { expectIsBandwidth } = require('./utils') +const { getDescribe, getIt } = require('../utils/mocha') + +const expect = chai.expect +chai.use(dirtyChai) + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.stats.bwReadableStream', () => { + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should get bandwidth stats over readable stream', (done) => { + const stream = ipfs.stats.bwReadableStream() + + stream.once('data', (data) => { + expectIsBandwidth(null, data) + stream.destroy() + done() + }) + }) + }) +} diff --git a/js/src/stats/bw.js b/js/src/stats/bw.js new file mode 100644 index 000000000..6668e5d4f --- /dev/null +++ b/js/src/stats/bw.js @@ -0,0 +1,52 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') 
+const { expectIsBandwidth } = require('./utils') +const { getDescribe, getIt } = require('../utils/mocha') + +const expect = chai.expect +chai.use(dirtyChai) + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.stats.bw', () => { + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should get bandwidth stats', function (done) { + ipfs.stats.bw((err, res) => { + expectIsBandwidth(err, res) + done() + }) + }) + + it('should get bandwidth stats (promised)', () => { + return ipfs.stats.bw().then((res) => { + expectIsBandwidth(null, res) + }) + }) + }) +} diff --git a/js/src/stats/index.js b/js/src/stats/index.js new file mode 100644 index 000000000..e07efd478 --- /dev/null +++ b/js/src/stats/index.js @@ -0,0 +1,12 @@ +'use strict' +const { createSuite } = require('../utils/suite') + +const tests = { + bitswap: require('./bitswap'), + bw: require('./bw'), + bwPullStream: require('./bw-pull-stream'), + bwReadableStream: require('./bw-readable-stream'), + repo: require('./repo') +} + +module.exports = createSuite(tests) diff --git a/js/src/stats/repo.js b/js/src/stats/repo.js new file mode 100644 index 000000000..c6dabbf68 --- /dev/null +++ b/js/src/stats/repo.js @@ -0,0 +1,52 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const { expectIsRepo } = require('./utils') +const { getDescribe, getIt } = require('../utils/mocha') + +const expect = chai.expect +chai.use(dirtyChai) + +module.exports = (createCommon, 
options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.stats.repo', () => { + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should get repo stats', (done) => { + ipfs.stats.repo((err, res) => { + expectIsRepo(err, res) + done() + }) + }) + + it('should get repo stats (promised)', () => { + return ipfs.stats.repo().then((res) => { + expectIsRepo(null, res) + }) + }) + }) +} diff --git a/js/src/utils/stats.js b/js/src/stats/utils.js similarity index 93% rename from js/src/utils/stats.js rename to js/src/stats/utils.js index d9c56f07c..3d1a584ab 100644 --- a/js/src/utils/stats.js +++ b/js/src/stats/utils.js @@ -9,7 +9,7 @@ const isBigInt = (n) => { return n.constructor.name === 'Big' } -module.exports.expectIsBitswap = (err, stats) => { +exports.expectIsBitswap = (err, stats) => { expect(err).to.not.exist() expect(stats).to.exist() expect(stats).to.have.a.property('provideBufLen') @@ -33,7 +33,7 @@ module.exports.expectIsBitswap = (err, stats) => { expect(isBigInt(stats.dupDataReceived)).to.eql(true) } -module.exports.expectIsBandwidth = (err, stats) => { +exports.expectIsBandwidth = (err, stats) => { expect(err).to.not.exist() expect(stats).to.exist() expect(stats).to.have.a.property('totalIn') @@ -46,7 +46,7 @@ module.exports.expectIsBandwidth = (err, stats) => { expect(isBigInt(stats.rateOut)).to.eql(true) } -module.exports.expectIsRepo = (err, res) => { +exports.expectIsRepo = (err, res) => { expect(err).to.not.exist() expect(res).to.exist() expect(res).to.have.a.property('numObjects') From f086eb42f63a90b204ee07543aa0129421ed2db6 Mon Sep 17 00:00:00 
2001 From: Alan Shaw Date: Thu, 7 Jun 2018 10:55:39 +0100 Subject: [PATCH 17/41] chore: move repeated fixtures into utils module License: MIT Signed-off-by: Alan Shaw --- js/src/files/add-pull-stream.js | 18 ++----- js/src/files/add-readable-stream.js | 18 ++----- js/src/files/add.js | 70 ++++++++++----------------- js/src/files/cat-pull-stream.js | 17 +++---- js/src/files/cat-readable-stream.js | 20 ++------ js/src/files/cat.js | 40 ++++++---------- js/src/files/get-pull-stream.js | 13 ++--- js/src/files/get-readable-stream.js | 13 ++--- js/src/files/get.js | 74 ++++++++++------------------- js/src/files/stat.js | 11 ++--- js/src/files/utils.js | 23 +++++++++ js/src/index.js | 1 - 12 files changed, 114 insertions(+), 204 deletions(-) create mode 100644 js/src/files/utils.js diff --git a/js/src/files/add-pull-stream.js b/js/src/files/add-pull-stream.js index d2395bc96..b8a16cc65 100644 --- a/js/src/files/add-pull-stream.js +++ b/js/src/files/add-pull-stream.js @@ -7,7 +7,7 @@ const chai = require('chai') const dirtyChai = require('dirty-chai') const expect = chai.expect chai.use(dirtyChai) -const loadFixture = require('aegir/fixtures') +const { fixtures } = require('./utils') const pull = require('pull-stream') const { getDescribe, getIt } = require('../utils/mocha') @@ -21,18 +21,6 @@ module.exports = (createCommon, options) => { let ipfs - const directory = { - cid: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP', - files: { - 'pp.txt': loadFixture('js/test/fixtures/test-folder/pp.txt', 'interface-ipfs-core'), - 'holmes.txt': loadFixture('js/test/fixtures/test-folder/holmes.txt', 'interface-ipfs-core'), - 'jungle.txt': loadFixture('js/test/fixtures/test-folder/jungle.txt', 'interface-ipfs-core'), - 'alice.txt': loadFixture('js/test/fixtures/test-folder/alice.txt', 'interface-ipfs-core'), - 'files/hello.txt': loadFixture('js/test/fixtures/test-folder/files/hello.txt', 'interface-ipfs-core'), - 'files/ipfs.txt': 
loadFixture('js/test/fixtures/test-folder/files/ipfs.txt', 'interface-ipfs-core') - } - } - before(function (done) { // CI takes longer to instantiate the daemon, so we need to increase the // timeout for the before step @@ -53,7 +41,7 @@ module.exports = (createCommon, options) => { it('should add pull stream of valid files and dirs', function (done) { const content = (name) => ({ path: `test-folder/${name}`, - content: directory.files[name] + content: fixtures.directory.files[name] }) const emptyDir = (name) => ({ path: `test-folder/${name}` }) @@ -79,7 +67,7 @@ module.exports = (createCommon, options) => { filesAdded.forEach((file) => { if (file.path === 'test-folder') { - expect(file.hash).to.equal(directory.cid) + expect(file.hash).to.equal(fixtures.directory.cid) done() } }) diff --git a/js/src/files/add-readable-stream.js b/js/src/files/add-readable-stream.js index a36fbf860..60388a9e3 100644 --- a/js/src/files/add-readable-stream.js +++ b/js/src/files/add-readable-stream.js @@ -7,7 +7,7 @@ const chai = require('chai') const dirtyChai = require('dirty-chai') const expect = chai.expect chai.use(dirtyChai) -const loadFixture = require('aegir/fixtures') +const { fixtures } = require('./utils') const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { @@ -20,18 +20,6 @@ module.exports = (createCommon, options) => { let ipfs - const directory = { - cid: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP', - files: { - 'pp.txt': loadFixture('js/test/fixtures/test-folder/pp.txt', 'interface-ipfs-core'), - 'holmes.txt': loadFixture('js/test/fixtures/test-folder/holmes.txt', 'interface-ipfs-core'), - 'jungle.txt': loadFixture('js/test/fixtures/test-folder/jungle.txt', 'interface-ipfs-core'), - 'alice.txt': loadFixture('js/test/fixtures/test-folder/alice.txt', 'interface-ipfs-core'), - 'files/hello.txt': loadFixture('js/test/fixtures/test-folder/files/hello.txt', 'interface-ipfs-core'), - 'files/ipfs.txt': 
loadFixture('js/test/fixtures/test-folder/files/ipfs.txt', 'interface-ipfs-core') - } - } - before(function (done) { // CI takes longer to instantiate the daemon, so we need to increase the // timeout for the before step @@ -52,7 +40,7 @@ module.exports = (createCommon, options) => { it('should add readable stream of valid files and dirs', function (done) { const content = (name) => ({ path: `test-folder/${name}`, - content: directory.files[name] + content: fixtures.directory.files[name] }) const emptyDir = (name) => ({ path: `test-folder/${name}` }) @@ -76,7 +64,7 @@ module.exports = (createCommon, options) => { stream.on('data', (file) => { if (file.path === 'test-folder') { - expect(file.hash).to.equal(directory.cid) + expect(file.hash).to.equal(fixtures.directory.cid) done() } }) diff --git a/js/src/files/add.js b/js/src/files/add.js index 7be377104..72eca0708 100644 --- a/js/src/files/add.js +++ b/js/src/files/add.js @@ -7,7 +7,7 @@ const chai = require('chai') const dirtyChai = require('dirty-chai') const expect = chai.expect chai.use(dirtyChai) -const loadFixture = require('aegir/fixtures') +const { fixtures } = require('./utils') const Readable = require('readable-stream').Readable const pull = require('pull-stream') const path = require('path') @@ -24,28 +24,6 @@ module.exports = (createCommon, options) => { let ipfs - const smallFile = { - cid: 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', - data: loadFixture('js/test/fixtures/testfile.txt', 'interface-ipfs-core') - } - - const bigFile = { - cid: 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq', - data: loadFixture('js/test/fixtures/15mb.random', 'interface-ipfs-core') - } - - const directory = { - cid: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP', - files: { - 'pp.txt': loadFixture('js/test/fixtures/test-folder/pp.txt', 'interface-ipfs-core'), - 'holmes.txt': loadFixture('js/test/fixtures/test-folder/holmes.txt', 'interface-ipfs-core'), - 'jungle.txt': 
loadFixture('js/test/fixtures/test-folder/jungle.txt', 'interface-ipfs-core'), - 'alice.txt': loadFixture('js/test/fixtures/test-folder/alice.txt', 'interface-ipfs-core'), - 'files/hello.txt': loadFixture('js/test/fixtures/test-folder/files/hello.txt', 'interface-ipfs-core'), - 'files/ipfs.txt': loadFixture('js/test/fixtures/test-folder/files/ipfs.txt', 'interface-ipfs-core') - } - } - before(function (done) { // CI takes longer to instantiate the daemon, so we need to increase the // timeout for the before step @@ -64,38 +42,38 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) it('should add a Buffer', (done) => { - ipfs.files.add(smallFile.data, (err, filesAdded) => { + ipfs.files.add(fixtures.smallFile.data, (err, filesAdded) => { expect(err).to.not.exist() expect(filesAdded).to.have.length(1) const file = filesAdded[0] - expect(file.hash).to.equal(smallFile.cid) - expect(file.path).to.equal(smallFile.cid) + expect(file.hash).to.equal(fixtures.smallFile.cid) + expect(file.path).to.equal(fixtures.smallFile.cid) // file.size counts the overhead by IPLD nodes and unixfs protobuf - expect(file.size).greaterThan(smallFile.data.length) + expect(file.size).greaterThan(fixtures.smallFile.data.length) done() }) }) it('should add a Buffer (promised)', () => { - return ipfs.files.add(smallFile.data) + return ipfs.files.add(fixtures.smallFile.data) .then((filesAdded) => { const file = filesAdded[0] - expect(file.hash).to.equal(smallFile.cid) - expect(file.path).to.equal(smallFile.cid) + expect(file.hash).to.equal(fixtures.smallFile.cid) + expect(file.path).to.equal(fixtures.smallFile.cid) }) }) it('should add a BIG Buffer', (done) => { - ipfs.files.add(bigFile.data, (err, filesAdded) => { + ipfs.files.add(fixtures.bigFile.data, (err, filesAdded) => { expect(err).to.not.exist() expect(filesAdded).to.have.length(1) const file = filesAdded[0] - expect(file.hash).to.equal(bigFile.cid) - expect(file.path).to.equal(bigFile.cid) + 
expect(file.hash).to.equal(fixtures.bigFile.cid) + expect(file.path).to.equal(fixtures.bigFile.cid) // file.size counts the overhead by IPLD nodes and unixfs protobuf - expect(file.size).greaterThan(bigFile.data.length) + expect(file.size).greaterThan(fixtures.bigFile.data.length) done() }) }) @@ -108,22 +86,22 @@ module.exports = (createCommon, options) => { accumProgress = p } - ipfs.files.add(bigFile.data, { progress: handler }, (err, filesAdded) => { + ipfs.files.add(fixtures.bigFile.data, { progress: handler }, (err, filesAdded) => { expect(err).to.not.exist() expect(filesAdded).to.have.length(1) const file = filesAdded[0] - expect(file.hash).to.equal(bigFile.cid) - expect(file.path).to.equal(bigFile.cid) + expect(file.hash).to.equal(fixtures.bigFile.cid) + expect(file.path).to.equal(fixtures.bigFile.cid) expect(progCalled).to.be.true() - expect(accumProgress).to.equal(bigFile.data.length) + expect(accumProgress).to.equal(fixtures.bigFile.data.length) done() }) }) it('should add a Buffer as tuple', (done) => { - const tuple = { path: 'testfile.txt', content: smallFile.data } + const tuple = { path: 'testfile.txt', content: fixtures.smallFile.data } ipfs.files.add([ tuple @@ -132,7 +110,7 @@ module.exports = (createCommon, options) => { expect(filesAdded).to.have.length(1) const file = filesAdded[0] - expect(file.hash).to.equal(smallFile.cid) + expect(file.hash).to.equal(fixtures.smallFile.cid) expect(file.path).to.equal('testfile.txt') done() @@ -222,7 +200,7 @@ module.exports = (createCommon, options) => { it('should add a nested directory as array of tupples', function (done) { const content = (name) => ({ path: `test-folder/${name}`, - content: directory.files[name] + content: fixtures.directory.files[name] }) const emptyDir = (name) => ({ path: `test-folder/${name}` }) @@ -243,7 +221,7 @@ module.exports = (createCommon, options) => { const root = res[res.length - 1] expect(root.path).to.equal('test-folder') - expect(root.hash).to.equal(directory.cid) + 
expect(root.hash).to.equal(fixtures.directory.cid) done() }) }) @@ -251,7 +229,7 @@ module.exports = (createCommon, options) => { it('should add a nested directory as array of tupples with progress', function (done) { const content = (name) => ({ path: `test-folder/${name}`, - content: directory.files[name] + content: fixtures.directory.files[name] }) const emptyDir = (name) => ({ path: `test-folder/${name}` }) @@ -285,7 +263,7 @@ module.exports = (createCommon, options) => { expect(progCalled).to.be.true() expect(accumProgress).to.be.at.least(total) expect(root.path).to.equal('test-folder') - expect(root.hash).to.equal(directory.cid) + expect(root.hash).to.equal(fixtures.directory.cid) done() }) }) @@ -300,14 +278,14 @@ module.exports = (createCommon, options) => { }) it('should wrap content in a directory', (done) => { - const data = { path: 'testfile.txt', content: smallFile.data } + const data = { path: 'testfile.txt', content: fixtures.smallFile.data } ipfs.files.add(data, { wrapWithDirectory: true }, (err, filesAdded) => { expect(err).to.not.exist() expect(filesAdded).to.have.length(2) const file = filesAdded[0] const wrapped = filesAdded[1] - expect(file.hash).to.equal(smallFile.cid) + expect(file.hash).to.equal(fixtures.smallFile.cid) expect(file.path).to.equal('testfile.txt') expect(wrapped.path).to.equal('') done() diff --git a/js/src/files/cat-pull-stream.js b/js/src/files/cat-pull-stream.js index 4dde40f7e..0539d0b27 100644 --- a/js/src/files/cat-pull-stream.js +++ b/js/src/files/cat-pull-stream.js @@ -7,7 +7,7 @@ const chai = require('chai') const dirtyChai = require('dirty-chai') const expect = chai.expect chai.use(dirtyChai) -const loadFixture = require('aegir/fixtures') +const { fixtures } = require('./utils') const pull = require('pull-stream') const { getDescribe, getIt } = require('../utils/mocha') @@ -21,11 +21,6 @@ module.exports = (createCommon, options) => { let ipfs - const smallFile = { - cid: 
'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', - data: loadFixture('js/test/fixtures/testfile.txt', 'interface-ipfs-core') - } - before(function (done) { // CI takes longer to instantiate the daemon, so we need to increase the // timeout for the before step @@ -41,19 +36,19 @@ module.exports = (createCommon, options) => { }) }) - before((done) => ipfs.files.add(smallFile.data, done)) + before((done) => ipfs.files.add(fixtures.smallFile.data, done)) after((done) => common.teardown(done)) it('should return a Pull Stream for a CID', (done) => { - const stream = ipfs.files.catPullStream(smallFile.cid) + const stream = ipfs.files.catPullStream(fixtures.smallFile.cid) pull( stream, pull.concat((err, data) => { expect(err).to.not.exist() - expect(data.length).to.equal(smallFile.data.length) - expect(data).to.eql(smallFile.data.toString()) + expect(data.length).to.equal(fixtures.smallFile.data.length) + expect(data).to.eql(fixtures.smallFile.data.toString()) done() }) ) @@ -63,7 +58,7 @@ module.exports = (createCommon, options) => { const offset = 1 const length = 3 - const stream = ipfs.files.catPullStream(smallFile.cid, { + const stream = ipfs.files.catPullStream(fixtures.smallFile.cid, { offset, length }) diff --git a/js/src/files/cat-readable-stream.js b/js/src/files/cat-readable-stream.js index 5c81fb508..c7d98d821 100644 --- a/js/src/files/cat-readable-stream.js +++ b/js/src/files/cat-readable-stream.js @@ -7,7 +7,7 @@ const chai = require('chai') const dirtyChai = require('dirty-chai') const expect = chai.expect chai.use(dirtyChai) -const loadFixture = require('aegir/fixtures') +const { fixtures } = require('./utils') const bl = require('bl') const { getDescribe, getIt } = require('../utils/mocha') @@ -21,16 +21,6 @@ module.exports = (createCommon, options) => { let ipfs - const smallFile = { - cid: 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', - data: loadFixture('js/test/fixtures/testfile.txt', 'interface-ipfs-core') - } - - const bigFile = { - cid: 
'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq', - data: loadFixture('js/test/fixtures/15mb.random', 'interface-ipfs-core') - } - before(function (done) { // CI takes longer to instantiate the daemon, so we need to increase the // timeout for the before step @@ -46,16 +36,16 @@ module.exports = (createCommon, options) => { }) }) - before((done) => ipfs.files.add(bigFile.data, done)) + before((done) => ipfs.files.add(fixtures.bigFile.data, done)) after((done) => common.teardown(done)) it('should return a Readable Stream for a CID', (done) => { - const stream = ipfs.files.catReadableStream(bigFile.cid) + const stream = ipfs.files.catReadableStream(fixtures.bigFile.cid) stream.pipe(bl((err, data) => { expect(err).to.not.exist() - expect(data).to.eql(bigFile.data) + expect(data).to.eql(fixtures.bigFile.data) done() })) }) @@ -64,7 +54,7 @@ module.exports = (createCommon, options) => { const offset = 1 const length = 3 - const stream = ipfs.files.catReadableStream(smallFile.cid, { + const stream = ipfs.files.catReadableStream(fixtures.smallFile.cid, { offset, length }) diff --git a/js/src/files/cat.js b/js/src/files/cat.js index ba13756b1..d7d1d7ab3 100644 --- a/js/src/files/cat.js +++ b/js/src/files/cat.js @@ -7,7 +7,7 @@ const chai = require('chai') const dirtyChai = require('dirty-chai') const expect = chai.expect chai.use(dirtyChai) -const loadFixture = require('aegir/fixtures') +const { fixtures } = require('./utils') const bs58 = require('bs58') const parallel = require('async/parallel') const CID = require('cids') @@ -23,16 +23,6 @@ module.exports = (createCommon, options) => { let ipfs - const smallFile = { - cid: 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', - data: loadFixture('js/test/fixtures/testfile.txt', 'interface-ipfs-core') - } - - const bigFile = { - cid: 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq', - data: loadFixture('js/test/fixtures/15mb.random', 'interface-ipfs-core') - } - before(function (done) { // CI takes longer to instantiate the 
daemon, so we need to increase the // timeout for the before step @@ -52,13 +42,13 @@ module.exports = (createCommon, options) => { before((done) => { parallel([ - (cb) => ipfs.files.add(smallFile.data, cb), - (cb) => ipfs.files.add(bigFile.data, cb) + (cb) => ipfs.files.add(fixtures.smallFile.data, cb), + (cb) => ipfs.files.add(fixtures.bigFile.data, cb) ], done) }) it('should cat with a base58 string encoded multihash', (done) => { - ipfs.files.cat(smallFile.cid, (err, data) => { + ipfs.files.cat(fixtures.smallFile.cid, (err, data) => { expect(err).to.not.exist() expect(data.toString()).to.contain('Plz add me!') done() @@ -66,14 +56,14 @@ module.exports = (createCommon, options) => { }) it('should cat with a base58 string encoded multihash (promised)', () => { - return ipfs.files.cat(smallFile.cid) + return ipfs.files.cat(fixtures.smallFile.cid) .then((data) => { expect(data.toString()).to.contain('Plz add me!') }) }) it('should cat with a Buffer multihash', (done) => { - const cid = Buffer.from(bs58.decode(smallFile.cid)) + const cid = Buffer.from(bs58.decode(fixtures.smallFile.cid)) ipfs.files.cat(cid, (err, data) => { expect(err).to.not.exist() @@ -83,7 +73,7 @@ module.exports = (createCommon, options) => { }) it('should cat with a CID object', (done) => { - const cid = new CID(smallFile.cid) + const cid = new CID(fixtures.smallFile.cid) ipfs.files.cat(cid, (err, data) => { expect(err).to.not.exist() @@ -93,16 +83,16 @@ module.exports = (createCommon, options) => { }) it('should cat a BIG file', (done) => { - ipfs.files.cat(bigFile.cid, (err, data) => { + ipfs.files.cat(fixtures.bigFile.cid, (err, data) => { expect(err).to.not.exist() - expect(data.length).to.equal(bigFile.data.length) - expect(data).to.eql(bigFile.data) + expect(data.length).to.equal(fixtures.bigFile.data.length) + expect(data).to.eql(fixtures.bigFile.data) done() }) }) it('should cat with IPFS path', (done) => { - const ipfsPath = '/ipfs/' + smallFile.cid + const ipfsPath = '/ipfs/' + 
fixtures.smallFile.cid ipfs.files.cat(ipfsPath, (err, data) => { expect(err).to.not.exist() @@ -112,7 +102,7 @@ module.exports = (createCommon, options) => { }) it('should cat with IPFS path, nested value', (done) => { - const file = { path: 'a/testfile.txt', content: smallFile.data } + const file = { path: 'a/testfile.txt', content: fixtures.smallFile.data } ipfs.files.add([file], (err, filesAdded) => { expect(err).to.not.exist() @@ -147,7 +137,7 @@ module.exports = (createCommon, options) => { }) it('should error on unknown path (promised)', () => { - return ipfs.files.cat(smallFile.cid + '/does-not-exist') + return ipfs.files.cat(fixtures.smallFile.cid + '/does-not-exist') .catch((err) => { expect(err).to.exist() expect(err.message).to.oneOf([ @@ -157,7 +147,7 @@ module.exports = (createCommon, options) => { }) it('should error on dir path (promised)', () => { - const file = { path: 'dir/testfile.txt', content: smallFile.data } + const file = { path: 'dir/testfile.txt', content: fixtures.smallFile.data } return ipfs.files.add([file]) .then((filesAdded) => { @@ -177,7 +167,7 @@ module.exports = (createCommon, options) => { const offset = 1 const length = 3 - ipfs.files.cat(smallFile.cid, { + ipfs.files.cat(fixtures.smallFile.cid, { offset, length }, (err, data) => { diff --git a/js/src/files/get-pull-stream.js b/js/src/files/get-pull-stream.js index 1ff79389d..d4ccacd40 100644 --- a/js/src/files/get-pull-stream.js +++ b/js/src/files/get-pull-stream.js @@ -7,7 +7,7 @@ const chai = require('chai') const dirtyChai = require('dirty-chai') const expect = chai.expect chai.use(dirtyChai) -const loadFixture = require('aegir/fixtures') +const { fixtures } = require('./utils') const pull = require('pull-stream') const { getDescribe, getIt } = require('../utils/mocha') @@ -21,11 +21,6 @@ module.exports = (createCommon, options) => { let ipfs - const smallFile = { - cid: 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', - data: loadFixture('js/test/fixtures/testfile.txt', 
'interface-ipfs-core') - } - before(function (done) { // CI takes longer to instantiate the daemon, so we need to increase the // timeout for the before step @@ -41,19 +36,19 @@ module.exports = (createCommon, options) => { }) }) - before((done) => ipfs.files.add(smallFile.data, done)) + before((done) => ipfs.files.add(fixtures.smallFile.data, done)) after((done) => common.teardown(done)) it('should return a Pull Stream of Pull Streams', (done) => { - const stream = ipfs.files.getPullStream(smallFile.cid) + const stream = ipfs.files.getPullStream(fixtures.smallFile.cid) pull( stream, pull.collect((err, files) => { expect(err).to.not.exist() expect(files).to.be.length(1) - expect(files[0].path).to.eql(smallFile.cid) + expect(files[0].path).to.eql(fixtures.smallFile.cid) pull( files[0].content, pull.concat((err, data) => { diff --git a/js/src/files/get-readable-stream.js b/js/src/files/get-readable-stream.js index d7517f24b..92d17f0f2 100644 --- a/js/src/files/get-readable-stream.js +++ b/js/src/files/get-readable-stream.js @@ -7,7 +7,7 @@ const chai = require('chai') const dirtyChai = require('dirty-chai') const expect = chai.expect chai.use(dirtyChai) -const loadFixture = require('aegir/fixtures') +const { fixtures } = require('./utils') const concat = require('concat-stream') const through = require('through2') const { getDescribe, getIt } = require('../utils/mocha') @@ -22,11 +22,6 @@ module.exports = (createCommon, options) => { let ipfs - const smallFile = { - cid: 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', - data: loadFixture('js/test/fixtures/testfile.txt', 'interface-ipfs-core') - } - before(function (done) { // CI takes longer to instantiate the daemon, so we need to increase the // timeout for the before step @@ -42,12 +37,12 @@ module.exports = (createCommon, options) => { }) }) - before((done) => ipfs.files.add(smallFile.data, done)) + before((done) => ipfs.files.add(fixtures.smallFile.data, done)) after((done) => common.teardown(done)) it('should 
return a Readable Stream of Readable Streams', (done) => { - const stream = ipfs.files.getReadableStream(smallFile.cid) + const stream = ipfs.files.getReadableStream(fixtures.smallFile.cid) let files = [] stream.pipe(through.obj((file, enc, next) => { @@ -57,7 +52,7 @@ module.exports = (createCommon, options) => { })) }, () => { expect(files).to.be.length(1) - expect(files[0].path).to.eql(smallFile.cid) + expect(files[0].path).to.eql(fixtures.smallFile.cid) expect(files[0].content.toString()).to.contain('Plz add me!') done() })) diff --git a/js/src/files/get.js b/js/src/files/get.js index 497e83515..285f0a874 100644 --- a/js/src/files/get.js +++ b/js/src/files/get.js @@ -7,7 +7,7 @@ const chai = require('chai') const dirtyChai = require('dirty-chai') const expect = chai.expect chai.use(dirtyChai) -const loadFixture = require('aegir/fixtures') +const { fixtures } = require('./utils') const bs58 = require('bs58') const parallel = require('async/parallel') const series = require('async/series') @@ -23,32 +23,6 @@ module.exports = (createCommon, options) => { let ipfs - function fixture (path) { - return loadFixture(path, 'interface-ipfs-core') - } - - const smallFile = { - cid: 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', - data: fixture('js/test/fixtures/testfile.txt') - } - - const bigFile = { - cid: 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq', - data: fixture('js/test/fixtures/15mb.random') - } - - const directory = { - cid: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP', - files: { - 'pp.txt': fixture('js/test/fixtures/test-folder/pp.txt'), - 'holmes.txt': fixture('js/test/fixtures/test-folder/holmes.txt'), - 'jungle.txt': fixture('js/test/fixtures/test-folder/jungle.txt'), - 'alice.txt': fixture('js/test/fixtures/test-folder/alice.txt'), - 'files/hello.txt': fixture('js/test/fixtures/test-folder/files/hello.txt'), - 'files/ipfs.txt': fixture('js/test/fixtures/test-folder/files/ipfs.txt') - } - } - before(function (done) { // CI takes longer to 
instantiate the daemon, so we need to increase the // timeout for the before step @@ -66,53 +40,53 @@ module.exports = (createCommon, options) => { before((done) => { parallel([ - (cb) => ipfs.files.add(smallFile.data, cb), - (cb) => ipfs.files.add(bigFile.data, cb) + (cb) => ipfs.files.add(fixtures.smallFile.data, cb), + (cb) => ipfs.files.add(fixtures.bigFile.data, cb) ], done) }) after((done) => common.teardown(done)) it('should get with a base58 encoded multihash', (done) => { - ipfs.files.get(smallFile.cid, (err, files) => { + ipfs.files.get(fixtures.smallFile.cid, (err, files) => { expect(err).to.not.exist() expect(files).to.be.length(1) - expect(files[0].path).to.eql(smallFile.cid) + expect(files[0].path).to.eql(fixtures.smallFile.cid) expect(files[0].content.toString('utf8')).to.contain('Plz add me!') done() }) }) it('should get with a base58 encoded multihash (promised)', () => { - return ipfs.files.get(smallFile.cid) + return ipfs.files.get(fixtures.smallFile.cid) .then((files) => { expect(files).to.be.length(1) - expect(files[0].path).to.equal(smallFile.cid) + expect(files[0].path).to.equal(fixtures.smallFile.cid) expect(files[0].content.toString()).to.contain('Plz add me!') }) }) it('should get with a Buffer multihash', (done) => { - const cidBuf = Buffer.from(bs58.decode(smallFile.cid)) + const cidBuf = Buffer.from(bs58.decode(fixtures.smallFile.cid)) ipfs.files.get(cidBuf, (err, files) => { expect(err).to.not.exist() expect(files).to.be.length(1) - expect(files[0].path).to.eql(smallFile.cid) + expect(files[0].path).to.eql(fixtures.smallFile.cid) expect(files[0].content.toString('utf8')).to.contain('Plz add me!') done() }) }) it('should get a BIG file', (done) => { - ipfs.files.get(bigFile.cid, (err, files) => { + ipfs.files.get(fixtures.bigFile.cid, (err, files) => { expect(err).to.not.exist() expect(files.length).to.equal(1) - expect(files[0].path).to.equal(bigFile.cid) - expect(files[0].content.length).to.eql(bigFile.data.length) - 
expect(files[0].content).to.eql(bigFile.data) + expect(files[0].path).to.equal(fixtures.bigFile.cid) + expect(files[0].content.length).to.eql(fixtures.bigFile.data.length) + expect(files[0].content).to.eql(fixtures.bigFile.data) done() }) }) @@ -122,7 +96,7 @@ module.exports = (createCommon, options) => { (cb) => { const content = (name) => ({ path: `test-folder/${name}`, - content: directory.files[name] + content: fixtures.directory.files[name] }) const emptyDir = (name) => ({ path: `test-folder/${name}` }) @@ -143,12 +117,12 @@ module.exports = (createCommon, options) => { const root = res[res.length - 1] expect(root.path).to.equal('test-folder') - expect(root.hash).to.equal(directory.cid) + expect(root.hash).to.equal(fixtures.directory.cid) cb() }) }, (cb) => { - ipfs.files.get(directory.cid, (err, files) => { + ipfs.files.get(fixtures.directory.cid, (err, files) => { expect(err).to.not.exist() files = files.sort((a, b) => { @@ -180,12 +154,12 @@ module.exports = (createCommon, options) => { }) expect(contents).to.include.members([ - directory.files['alice.txt'].toString(), - directory.files['files/hello.txt'].toString(), - directory.files['files/ipfs.txt'].toString(), - directory.files['holmes.txt'].toString(), - directory.files['jungle.txt'].toString(), - directory.files['pp.txt'].toString() + fixtures.directory.files['alice.txt'].toString(), + fixtures.directory.files['files/hello.txt'].toString(), + fixtures.directory.files['files/ipfs.txt'].toString(), + fixtures.directory.files['holmes.txt'].toString(), + fixtures.directory.files['jungle.txt'].toString(), + fixtures.directory.files['pp.txt'].toString() ]) cb() }) @@ -196,7 +170,7 @@ module.exports = (createCommon, options) => { it('should get with ipfs path, as object and nested value', (done) => { const file = { path: 'a/testfile.txt', - content: smallFile.data + content: fixtures.smallFile.data } ipfs.files.add(file, (err, filesAdded) => { @@ -218,7 +192,7 @@ module.exports = (createCommon, options) => { 
it('should get with ipfs path, as array and nested value', (done) => { const file = { path: 'a/testfile.txt', - content: smallFile.data + content: fixtures.smallFile.data } ipfs.files.add([file], (err, filesAdded) => { diff --git a/js/src/files/stat.js b/js/src/files/stat.js index 01d0708d0..28c5dde64 100644 --- a/js/src/files/stat.js +++ b/js/src/files/stat.js @@ -6,7 +6,7 @@ const chai = require('chai') const series = require('async/series') const hat = require('hat') -const loadFixture = require('aegir/fixtures') +const { fixtures } = require('./utils') const dirtyChai = require('dirty-chai') const expect = chai.expect chai.use(dirtyChai) @@ -22,11 +22,6 @@ module.exports = (createCommon, options) => { let ipfs - const smallFile = { - cid: 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', - data: loadFixture('js/test/fixtures/testfile.txt', 'interface-ipfs-core') - } - before(function (done) { // CI takes longer to instantiate the daemon, so we need to increase the // timeout for the before step @@ -42,7 +37,7 @@ module.exports = (createCommon, options) => { }) }) - before((done) => ipfs.files.add(smallFile.data, done)) + before((done) => ipfs.files.add(fixtures.smallFile.data, done)) after((done) => common.teardown(done)) @@ -145,7 +140,7 @@ module.exports = (createCommon, options) => { // TODO: (achingbrain) - Not yet supported in js-ipfs or go-ipfs yet') it.skip('should stat outside of mfs', function (done) { - ipfs.files.stat('/ipfs/' + smallFile.cid, (err, stat) => { + ipfs.files.stat('/ipfs/' + fixtures.smallFile.cid, (err, stat) => { expect(err).to.not.exist() expect(stat).to.eql({ type: 'file', diff --git a/js/src/files/utils.js b/js/src/files/utils.js new file mode 100644 index 000000000..28bd24570 --- /dev/null +++ b/js/src/files/utils.js @@ -0,0 +1,23 @@ +const loadFixture = require('aegir/fixtures') + +exports.fixtures = Object.freeze({ + directory: Object.freeze({ + cid: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP', + files: Object.freeze({ + 
'pp.txt': loadFixture('js/test/fixtures/test-folder/pp.txt', 'interface-ipfs-core'), + 'holmes.txt': loadFixture('js/test/fixtures/test-folder/holmes.txt', 'interface-ipfs-core'), + 'jungle.txt': loadFixture('js/test/fixtures/test-folder/jungle.txt', 'interface-ipfs-core'), + 'alice.txt': loadFixture('js/test/fixtures/test-folder/alice.txt', 'interface-ipfs-core'), + 'files/hello.txt': loadFixture('js/test/fixtures/test-folder/files/hello.txt', 'interface-ipfs-core'), + 'files/ipfs.txt': loadFixture('js/test/fixtures/test-folder/files/ipfs.txt', 'interface-ipfs-core') + }) + }), + smallFile: Object.freeze({ + cid: 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', + data: loadFixture('js/test/fixtures/testfile.txt', 'interface-ipfs-core') + }), + bigFile: Object.freeze({ + cid: 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq', + data: loadFixture('js/test/fixtures/15mb.random', 'interface-ipfs-core') + }) +}) diff --git a/js/src/index.js b/js/src/index.js index 1bcf026c1..87721dafc 100644 --- a/js/src/index.js +++ b/js/src/index.js @@ -8,7 +8,6 @@ exports.dht = require('./dht') exports.files = require('./files') exports.key = require('./key') exports.ls = require('./ls') -exports.generic = require('./miscellaneous') exports.miscellaneous = require('./miscellaneous') exports.object = require('./object') exports.pin = require('./pin') From f634c148c7417890fbbbfcb9411970e0a6b35fa2 Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Thu, 7 Jun 2018 10:59:10 +0100 Subject: [PATCH 18/41] chore: use files fixtures in ls tests License: MIT Signed-off-by: Alan Shaw --- js/src/ls/ls-pull-stream.js | 18 +++--------------- js/src/ls/ls-readable-stream.js | 22 +++------------------- js/src/ls/ls.js | 18 +++--------------- 3 files changed, 9 insertions(+), 49 deletions(-) diff --git a/js/src/ls/ls-pull-stream.js b/js/src/ls/ls-pull-stream.js index 7bbf2e6b7..7a00e2d3d 100644 --- a/js/src/ls/ls-pull-stream.js +++ b/js/src/ls/ls-pull-stream.js @@ -7,7 +7,7 @@ const chai = 
require('chai') const dirtyChai = require('dirty-chai') const expect = chai.expect chai.use(dirtyChai) -const loadFixture = require('aegir/fixtures') +const { fixtures } = require('../files/utils') const pull = require('pull-stream') const { getDescribe, getIt } = require('../utils/mocha') @@ -21,18 +21,6 @@ module.exports = (createCommon, options) => { let ipfs - const directory = { - cid: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP', - files: { - 'pp.txt': loadFixture('js/test/fixtures/test-folder/pp.txt', 'interface-ipfs-core'), - 'holmes.txt': loadFixture('js/test/fixtures/test-folder/holmes.txt', 'interface-ipfs-core'), - 'jungle.txt': loadFixture('js/test/fixtures/test-folder/jungle.txt', 'interface-ipfs-core'), - 'alice.txt': loadFixture('js/test/fixtures/test-folder/alice.txt', 'interface-ipfs-core'), - 'files/hello.txt': loadFixture('js/test/fixtures/test-folder/files/hello.txt', 'interface-ipfs-core'), - 'files/ipfs.txt': loadFixture('js/test/fixtures/test-folder/files/ipfs.txt', 'interface-ipfs-core') - } - } - before(function (done) { // CI takes longer to instantiate the daemon, so we need to increase the // timeout for the before step @@ -53,7 +41,7 @@ module.exports = (createCommon, options) => { it('should pull stream ls with a base58 encoded CID', function (done) { const content = (name) => ({ path: `test-folder/${name}`, - content: directory.files[name] + content: fixtures.directory.files[name] }) const emptyDir = (name) => ({ path: `test-folder/${name}` }) @@ -74,7 +62,7 @@ module.exports = (createCommon, options) => { const root = res[res.length - 1] expect(root.path).to.equal('test-folder') - expect(root.hash).to.equal(directory.cid) + expect(root.hash).to.equal(fixtures.directory.cid) const cid = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' const stream = ipfs.lsPullStream(cid) diff --git a/js/src/ls/ls-readable-stream.js b/js/src/ls/ls-readable-stream.js index faab61fb8..ef75b0816 100644 --- a/js/src/ls/ls-readable-stream.js +++ 
b/js/src/ls/ls-readable-stream.js @@ -7,7 +7,7 @@ const chai = require('chai') const dirtyChai = require('dirty-chai') const expect = chai.expect chai.use(dirtyChai) -const loadFixture = require('aegir/fixtures') +const { fixtures } = require('../files/utils') const concat = require('concat-stream') const { getDescribe, getIt } = require('../utils/mocha') @@ -21,22 +21,6 @@ module.exports = (createCommon, options) => { let ipfs - function fixture (path) { - return loadFixture(path, 'interface-ipfs-core') - } - - const directory = { - cid: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP', - files: { - 'pp.txt': fixture('js/test/fixtures/test-folder/pp.txt'), - 'holmes.txt': fixture('js/test/fixtures/test-folder/holmes.txt'), - 'jungle.txt': fixture('js/test/fixtures/test-folder/jungle.txt'), - 'alice.txt': fixture('js/test/fixtures/test-folder/alice.txt'), - 'files/hello.txt': fixture('js/test/fixtures/test-folder/files/hello.txt'), - 'files/ipfs.txt': fixture('js/test/fixtures/test-folder/files/ipfs.txt') - } - } - before(function (done) { // CI takes longer to instantiate the daemon, so we need to increase the // timeout for the before step @@ -57,7 +41,7 @@ module.exports = (createCommon, options) => { it('should readable stream ls with a base58 encoded CID', function (done) { const content = (name) => ({ path: `test-folder/${name}`, - content: directory.files[name] + content: fixtures.directory.files[name] }) const emptyDir = (name) => ({ path: `test-folder/${name}` }) @@ -78,7 +62,7 @@ module.exports = (createCommon, options) => { const root = res[res.length - 1] expect(root.path).to.equal('test-folder') - expect(root.hash).to.equal(directory.cid) + expect(root.hash).to.equal(fixtures.directory.cid) const cid = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' const stream = ipfs.lsReadableStream(cid) diff --git a/js/src/ls/ls.js b/js/src/ls/ls.js index e8c939e33..3883b7bba 100644 --- a/js/src/ls/ls.js +++ b/js/src/ls/ls.js @@ -7,7 +7,7 @@ const chai = 
require('chai') const dirtyChai = require('dirty-chai') const expect = chai.expect chai.use(dirtyChai) -const loadFixture = require('aegir/fixtures') +const { fixtures } = require('../files/utils') const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { @@ -20,18 +20,6 @@ module.exports = (createCommon, options) => { let ipfs - const directory = { - cid: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP', - files: { - 'pp.txt': loadFixture('js/test/fixtures/test-folder/pp.txt', 'interface-ipfs-core'), - 'holmes.txt': loadFixture('js/test/fixtures/test-folder/holmes.txt', 'interface-ipfs-core'), - 'jungle.txt': loadFixture('js/test/fixtures/test-folder/jungle.txt', 'interface-ipfs-core'), - 'alice.txt': loadFixture('js/test/fixtures/test-folder/alice.txt', 'interface-ipfs-core'), - 'files/hello.txt': loadFixture('js/test/fixtures/test-folder/files/hello.txt', 'interface-ipfs-core'), - 'files/ipfs.txt': loadFixture('js/test/fixtures/test-folder/files/ipfs.txt', 'interface-ipfs-core') - } - } - before(function (done) { // CI takes longer to instantiate the daemon, so we need to increase the // timeout for the before step @@ -52,7 +40,7 @@ module.exports = (createCommon, options) => { it('should ls with a base58 encoded CID', function (done) { const content = (name) => ({ path: `test-folder/${name}`, - content: directory.files[name] + content: fixtures.directory.files[name] }) const emptyDir = (name) => ({ path: `test-folder/${name}` }) @@ -73,7 +61,7 @@ module.exports = (createCommon, options) => { const root = res[res.length - 1] expect(root.path).to.equal('test-folder') - expect(root.hash).to.equal(directory.cid) + expect(root.hash).to.equal(fixtures.directory.cid) const cid = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' ipfs.ls(cid, (err, files) => { From b42a54b70ac1717d305b2449ab3acc3c75b1e6ef Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Thu, 7 Jun 2018 11:10:25 +0100 Subject: [PATCH 19/41] feat: modularised 
types and util License: MIT Signed-off-by: Alan Shaw --- js/src/types.js | 15 +++++++++------ js/src/util.js | 14 +++++++++----- 2 files changed, 18 insertions(+), 11 deletions(-) diff --git a/js/src/types.js b/js/src/types.js index 070b40034..de68421e0 100644 --- a/js/src/types.js +++ b/js/src/types.js @@ -9,13 +9,18 @@ const multiaddr = require('multiaddr') const multibase = require('multibase') const multihash = require('multihashes') const CID = require('cids') - const chai = require('chai') const dirtyChai = require('dirty-chai') +const { getDescribe, getIt } = require('./utils/mocha') + const expect = chai.expect chai.use(dirtyChai) -module.exports = (common) => { +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + describe('.types', function () { let ipfs @@ -34,11 +39,9 @@ module.exports = (common) => { }) }) - after((done) => { - common.teardown(done) - }) + after((done) => common.teardown(done)) - it('types object', () => { + it('should have a types object with the required values', () => { expect(ipfs.types).to.be.deep.equal({ Buffer: Buffer, PeerId: PeerId, diff --git a/js/src/util.js b/js/src/util.js index b64ef2b3c..250384bd8 100644 --- a/js/src/util.js +++ b/js/src/util.js @@ -5,10 +5,16 @@ const crypto = require('libp2p-crypto') const isIPFS = require('is-ipfs') const chai = require('chai') const dirtyChai = require('dirty-chai') +const { getDescribe, getIt } = require('./utils/mocha') + const expect = chai.expect chai.use(dirtyChai) -module.exports = (common) => { +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + describe('.util', function () { let ipfs @@ -27,11 +33,9 @@ module.exports = (common) => { }) }) - after((done) => { - common.teardown(done) - }) + after((done) => common.teardown(done)) - it('util object', () => { + it('should have a util object 
with the required values', () => { expect(ipfs.util).to.be.deep.equal({ crypto: crypto, isIPFS: isIPFS From 9318dbcfd69632ad3a1d8c027934a9160aaaff98 Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Thu, 7 Jun 2018 18:07:03 +0100 Subject: [PATCH 20/41] feat: modularises object.patch* and object.new License: MIT Signed-off-by: Alan Shaw --- js/src/object.js | 1003 ---------------------------- js/src/object/index.js | 14 + js/src/object/new.js | 67 ++ js/src/object/patch/add-link.js | 186 ++++++ js/src/object/patch/append-data.js | 73 ++ js/src/object/patch/index.js | 11 + js/src/object/patch/rm-link.js | 131 ++++ js/src/object/patch/set-data.js | 73 ++ js/src/utils/suite.js | 7 +- 9 files changed, 559 insertions(+), 1006 deletions(-) delete mode 100644 js/src/object.js create mode 100644 js/src/object/index.js create mode 100644 js/src/object/new.js create mode 100644 js/src/object/patch/add-link.js create mode 100644 js/src/object/patch/append-data.js create mode 100644 js/src/object/patch/index.js create mode 100644 js/src/object/patch/rm-link.js create mode 100644 js/src/object/patch/set-data.js diff --git a/js/src/object.js b/js/src/object.js deleted file mode 100644 index 5bdb40f7d..000000000 --- a/js/src/object.js +++ /dev/null @@ -1,1003 +0,0 @@ -/* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - -'use strict' - -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) -const dagPB = require('ipld-dag-pb') -const DAGNode = dagPB.DAGNode -const bs58 = require('bs58') -const series = require('async/series') - -module.exports = (common) => { - describe('.object', function () { - this.timeout(80 * 1000) - - let ipfs - - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - 
expect(err).to.not.exist() - ipfs = node - done() - }) - }) - }) - - after((done) => { - common.teardown(done) - }) - - describe('callback API', () => { - describe('.new', () => { - it('no layout', (done) => { - ipfs.object.new((err, node) => { - expect(err).to.not.exist() - const nodeJSON = node.toJSON() - expect(nodeJSON.multihash).to.equal('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n') - done() - }) - }) - - it('template unixfs-dir', (done) => { - ipfs.object.new('unixfs-dir', (err, node) => { - expect(err).to.not.exist() - const nodeJSON = node.toJSON() - expect(nodeJSON.multihash) - .to.equal('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn') - done() - }) - }) - }) - - describe('.put', () => { - it('of object', (done) => { - const obj = { - Data: Buffer.from('Some data'), - Links: [] - } - - ipfs.object.put(obj, (err, node) => { - expect(err).to.not.exist() - const nodeJSON = node.toJSON() - expect(nodeJSON.data).to.eql(obj.Data) - expect(nodeJSON.links).to.eql(obj.Links) - expect(nodeJSON.multihash).to.equal('QmPb5f92FxKPYdT3QNBd1GKiL4tZUXUrzF4Hkpdr3Gf1gK') - done() - }) - }) - - it('of json encoded buffer', (done) => { - const obj = { - Data: Buffer.from('Some data'), - Links: [] - } - - const obj2 = { - Data: obj.Data.toString(), - Links: obj.Links - } - - const buf = Buffer.from(JSON.stringify(obj2)) - - ipfs.object.put(buf, { enc: 'json' }, (err, node) => { - expect(err).to.not.exist() - const nodeJSON = node.toJSON() - - expect(nodeJSON.data).to.eql(node.data) - expect(nodeJSON.multihash).to.equal('QmPb5f92FxKPYdT3QNBd1GKiL4tZUXUrzF4Hkpdr3Gf1gK') - done() - }) - }) - - it('of protobuf encoded buffer', (done) => { - let node - let serialized - - series([ - (cb) => { - DAGNode.create(Buffer.from('Some data'), (err, _node) => { - expect(err).to.not.exist() - node = _node - cb() - }) - }, - (cb) => { - dagPB.util.serialize(node, (err, _serialized) => { - expect(err).to.not.exist() - serialized = _serialized - cb() - }) - }, - (cb) => { - 
ipfs.object.put(serialized, { enc: 'protobuf' }, (err, storedNode) => { - expect(err).to.not.exist() - expect(node.data).to.deep.equal(node.data) - expect(node.links).to.deep.equal(node.links) - expect(node.multihash).to.eql(storedNode.multihash) - cb() - }) - } - ], done) - }) - - it('of buffer treated as Data field', (done) => { - const data = Buffer.from('Some data') - ipfs.object.put(data, (err, node) => { - expect(err).to.not.exist() - const nodeJSON = node.toJSON() - expect(data).to.deep.equal(nodeJSON.data) - expect([]).to.deep.equal(nodeJSON.links) - expect(nodeJSON.multihash).to.equal('QmPb5f92FxKPYdT3QNBd1GKiL4tZUXUrzF4Hkpdr3Gf1gK') - done() - }) - }) - - it('of DAGNode', (done) => { - DAGNode.create(Buffer.from('Some data'), (err, dNode) => { - expect(err).to.not.exist() - ipfs.object.put(dNode, (err, node) => { - expect(err).to.not.exist() - expect(dNode.data).to.deep.equal(node.data) - expect(dNode.links).to.deep.equal(node.links) - done() - }) - }) - }) - - it('fails if String is passed', (done) => { - ipfs.object.put('aaa', (err) => { - expect(err).to.exist() - done() - }) - }) - - it('DAGNode with a link', (done) => { - let node1a - let node1b - let node2 - series([ - (cb) => { - DAGNode.create(Buffer.from('Some data 1'), (err, node) => { - expect(err).to.not.exist() - node1a = node - cb() - }) - }, - (cb) => { - DAGNode.create(Buffer.from('Some data 2'), (err, node) => { - expect(err).to.not.exist() - node2 = node - cb() - }) - }, - (cb) => { - const link = node2.toJSON() - link.name = 'some-link' - DAGNode.addLink(node1a, link, (err, node) => { - expect(err).to.not.exist() - node1b = node - cb() - }) - }, - (cb) => { - ipfs.object.put(node1b, (err, node) => { - expect(err).to.not.exist() - expect(node1b.data).to.deep.equal(node.data) - expect(node1b.links.map((l) => l.toJSON())) - .to.deep.equal(node.links.map((l) => l.toJSON())) - cb() - }) - } - ], done) - }) - }) - - describe('.get', () => { - it('with multihash', (done) => { - const obj = { - 
Data: Buffer.from('get test object'), - Links: [] - } - - let node1 - let node2 - - series([ - (cb) => { - ipfs.object.put(obj, (err, node) => { - expect(err).to.not.exist() - node1 = node - cb() - }) - }, - (cb) => { - ipfs.object.get(node1.multihash, (err, node) => { - expect(err).to.not.exist() - node2 = node - - // because js-ipfs-api can't infer if the - // returned Data is Buffer or String - if (typeof node2.data === 'string') { - node2.data = Buffer.from(node2.data) - } - cb() - }) - }, - (cb) => { - // get object from ipfs multihash string - ipfs.object.get(node1.toJSON().multihash, (err, node) => { - expect(err).to.not.exist() - expect(node).to.exist() - cb() - }) - }, - (cb) => { - expect(node1.data).to.eql(node2.data) - expect(node1.links).to.eql(node2.links) - expect(node1.multihash).to.eql(node2.multihash) - cb() - } - ], done) - }) - - it('with multihash (+ links)', (done) => { - let node1a - let node1b - let node1c - let node2 - - series([ - (cb) => { - DAGNode.create(Buffer.from('Some data 1'), (err, node) => { - expect(err).to.not.exist() - node1a = node - cb() - }) - }, - (cb) => { - DAGNode.create(Buffer.from('Some data 2'), (err, node) => { - expect(err).to.not.exist() - node2 = node - cb() - }) - }, - (cb) => { - const link = node2.toJSON() - link.name = 'some-link' - DAGNode.addLink(node1a, link, (err, node) => { - expect(err).to.not.exist() - node1b = node - cb() - }) - }, - (cb) => { - ipfs.object.put(node1b, cb) - }, - (cb) => { - ipfs.object.get(node1b.multihash, (err, node) => { - expect(err).to.not.exist() - - // because js-ipfs-api can't infer if the - // returned Data is Buffer or String - if (typeof node.data === 'string') { - node.data = Buffer.from(node.data) - } - - node1c = node - cb() - }) - }, - (cb) => { - expect(node1a.data).to.eql(node1c.data) - expect(node1b.multihash).to.eql(node1c.multihash) - cb() - } - ], done) - }) - - it('with multihash base58 encoded', (done) => { - const obj = { - Data: Buffer.from('get test 
object'), - Links: [] - } - - let node1a - let node1b - - series([ - (cb) => { - ipfs.object.put(obj, (err, node) => { - expect(err).to.not.exist() - node1a = node - cb() - }) - }, - (cb) => { - ipfs.object.get(node1a.multihash, { enc: 'base58' }, (err, node) => { - expect(err).to.not.exist() - // because js-ipfs-api can't infer if the - // returned Data is Buffer or String - if (typeof node.data === 'string') { - node.data = Buffer.from(node.data) - } - node1b = node - cb() - }) - }, - (cb) => { - expect(node1a.multihash).to.eql(node1b.multihash) - expect(node1a.data).to.eql(node1b.data) - expect(node1a.links).to.eql(node1b.links) - cb() - } - ], done) - }) - - it('with multihash base58 encoded toString', (done) => { - const obj = { - Data: Buffer.from('get test object'), - Links: [] - } - - let node1a - let node1b - - series([ - (cb) => { - ipfs.object.put(obj, (err, node) => { - expect(err).to.not.exist() - node1a = node - cb() - }) - }, - (cb) => { - ipfs.object.get(bs58.encode(node1a.multihash).toString(), { enc: 'base58' }, (err, node) => { - expect(err).to.not.exist() - // because js-ipfs-api can't infer if the - // returned Data is Buffer or String - if (typeof node.data === 'string') { - node.data = Buffer.from(node.data) - } - node1b = node - cb() - }) - }, - (cb) => { - expect(node1a.multihash).to.eql(node1b.multihash) - expect(node1a.data).to.eql(node1b.data) - expect(node1a.links).to.eql(node1b.links) - cb() - } - ], done) - }) - }) - - describe('.data', () => { - it('with multihash', (done) => { - const testObj = { - Data: Buffer.from('get test object'), - Links: [] - } - - ipfs.object.put(testObj, (err, node) => { - expect(err).to.not.exist() - - ipfs.object.data(node.multihash, (err, data) => { - expect(err).to.not.exist() - - // because js-ipfs-api can't infer - // if the returned Data is Buffer or String - if (typeof data === 'string') { - data = Buffer.from(data) - } - expect(node.data).to.eql(data) - done() - }) - }) - }) - - it('with multihash 
base58 encoded', (done) => { - const testObj = { - Data: Buffer.from('get test object'), - Links: [] - } - - ipfs.object.put(testObj, (err, node) => { - expect(err).to.not.exist() - - ipfs.object.data(bs58.encode(node.multihash), { enc: 'base58' }, (err, data) => { - expect(err).to.not.exist() - - // because js-ipfs-api can't infer - // if the returned Data is Buffer or String - if (typeof data === 'string') { - data = Buffer.from(data) - } - expect(node.data).to.eql(data) - done() - }) - }) - }) - - it('with multihash base58 encoded toString', (done) => { - const testObj = { - Data: Buffer.from('get test object'), - Links: [] - } - - ipfs.object.put(testObj, (err, node) => { - expect(err).to.not.exist() - - ipfs.object.data(bs58.encode(node.multihash).toString(), { enc: 'base58' }, (err, data) => { - expect(err).to.not.exist() - - // because js-ipfs-api can't infer if the - // returned Data is Buffer or String - if (typeof data === 'string') { - data = Buffer.from(data) - } - expect(node.data).to.eql(data) - done() - }) - }) - }) - }) - - describe('.links', () => { - it('object.links with multihash', (done) => { - const testObj = { - Data: Buffer.from('get test object'), - Links: [] - } - - ipfs.object.put(testObj, (err, node) => { - expect(err).to.not.exist() - - ipfs.object.links(node.multihash, (err, links) => { - expect(err).to.not.exist() - expect(node.links).to.deep.equal(links) - done() - }) - }) - }) - - it('with multihash (+ links)', (done) => { - let node1a - let node1b - let node2 - - series([ - (cb) => { - DAGNode.create(Buffer.from('Some data 1'), (err, node) => { - expect(err).to.not.exist() - node1a = node - cb() - }) - }, - (cb) => { - DAGNode.create(Buffer.from('Some data 2'), (err, node) => { - expect(err).to.not.exist() - node2 = node - cb() - }) - }, - (cb) => { - const link = node2.toJSON() - link.name = 'some-link' - - DAGNode.addLink(node1a, link, (err, node) => { - expect(err).to.not.exist() - node1b = node - cb() - }) - }, - (cb) => { - 
ipfs.object.put(node1b, cb) - }, - (cb) => { - ipfs.object.links(node1b.multihash, (err, links) => { - expect(err).to.not.exist() - expect(node1b.links[0].toJSON()).to.eql(links[0].toJSON()) - cb() - }) - } - ], done) - }) - - it('with multihash base58 encoded', (done) => { - const testObj = { - Data: Buffer.from('get test object'), - Links: [] - } - - ipfs.object.put(testObj, (err, node) => { - expect(err).to.not.exist() - - ipfs.object.links(bs58.encode(node.multihash), { enc: 'base58' }, (err, links) => { - expect(err).to.not.exist() - expect(node.links).to.deep.equal(links) - done() - }) - }) - }) - - it('with multihash base58 encoded toString', (done) => { - const testObj = { - Data: Buffer.from('get test object'), - Links: [] - } - - ipfs.object.put(testObj, (err, node) => { - expect(err).to.not.exist() - ipfs.object.links(bs58.encode(node.multihash), { enc: 'base58' }, (err, links) => { - expect(err).to.not.exist() - expect(node.links).to.deep.equal(links) - done() - }) - }) - }) - }) - - describe('.stat', () => { - it('with multihash', (done) => { - const testObj = { - Data: Buffer.from('get test object'), - Links: [] - } - - ipfs.object.put(testObj, (err, node) => { - expect(err).to.not.exist() - - ipfs.object.stat(node.multihash, (err, stats) => { - expect(err).to.not.exist() - const expected = { - Hash: 'QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3Ms2sdJZ', - NumLinks: 0, - BlockSize: 17, - LinksSize: 2, - DataSize: 15, - CumulativeSize: 17 - } - expect(expected).to.deep.equal(stats) - done() - }) - }) - }) - - it('with multihash (+ Links)', (done) => { - let node1a - let node1b - let node2 - - series([ - (cb) => { - DAGNode.create(Buffer.from('Some data 1'), (err, node) => { - expect(err).to.not.exist() - node1a = node - cb() - }) - }, - (cb) => { - DAGNode.create(Buffer.from('Some data 2'), (err, node) => { - expect(err).to.not.exist() - node2 = node - cb() - }) - }, - (cb) => { - const link = node2.toJSON() - link.name = 'some-link' - - 
DAGNode.addLink(node1a, link, (err, node) => { - expect(err).to.not.exist() - node1b = node - cb() - }) - }, - (cb) => { - ipfs.object.put(node1b, cb) - }, - (cb) => { - ipfs.object.stat(node1b.multihash, (err, stats) => { - expect(err).to.not.exist() - const expected = { - Hash: 'QmPR7W4kaADkAo4GKEVVPQN81EDUFCHJtqejQZ5dEG7pBC', - NumLinks: 1, - BlockSize: 64, - LinksSize: 53, - DataSize: 11, - CumulativeSize: 77 - } - expect(expected).to.eql(stats) - cb() - }) - } - ], done) - }) - - it('with multihash base58 encoded', (done) => { - const testObj = { - Data: Buffer.from('get test object'), - Links: [] - } - - ipfs.object.put(testObj, (err, node) => { - expect(err).to.not.exist() - - ipfs.object.stat(bs58.encode(node.multihash), { enc: 'base58' }, (err, stats) => { - expect(err).to.not.exist() - const expected = { - Hash: 'QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3Ms2sdJZ', - NumLinks: 0, - BlockSize: 17, - LinksSize: 2, - DataSize: 15, - CumulativeSize: 17 - } - expect(expected).to.deep.equal(stats) - done() - }) - }) - }) - - it('with multihash base58 encoded toString', (done) => { - const testObj = { - Data: Buffer.from('get test object'), - Links: [] - } - - ipfs.object.put(testObj, (err, node) => { - expect(err).to.not.exist() - - ipfs.object.stat(bs58.encode(node.multihash).toString(), { enc: 'base58' }, (err, stats) => { - expect(err).to.not.exist() - const expected = { - Hash: 'QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3Ms2sdJZ', - NumLinks: 0, - BlockSize: 17, - LinksSize: 2, - DataSize: 15, - CumulativeSize: 17 - } - expect(expected).to.deep.equal(stats) - done() - }) - }) - }) - }) - - describe('.patch', () => { - let testNodeMultihash - let testNodeWithLinkMultihash - let testLink - - const obj = { - Data: Buffer.from('patch test object'), - Links: [] - } - - before((done) => { - ipfs.object.put(obj, (err, node) => { - expect(err).to.not.exist() - testNodeMultihash = node.multihash - done() - }) - }) - - it('.addLink', (done) => { - let node1a - let node1b - 
let node2 - - series([ - (cb) => { - DAGNode.create(obj.Data, obj.Links, (err, node) => { - expect(err).to.not.exist() - node1a = node - cb() - }) - }, - (cb) => { - DAGNode.create(Buffer.from('some other node'), (err, node) => { - expect(err).to.not.exist() - node2 = node - cb() - }) - }, - (cb) => { - // note: we need to put the linked obj, otherwise IPFS won't - // timeout. Reason: it needs the node to get its size - ipfs.object.put(node2, cb) - }, - (cb) => { - const link = node2.toJSON() - link.name = 'link-to-node' - DAGNode.addLink(node1a, link, (err, node) => { - expect(err).to.not.exist() - node1b = node - cb() - }) - }, - (cb) => { - ipfs.object.patch.addLink(testNodeMultihash, node1b.links[0], (err, node) => { - expect(err).to.not.exist() - expect(node1b.multihash).to.eql(node.multihash) - testNodeWithLinkMultihash = node.multihash - testLink = node1b.links[0] - cb() - }) - } - /* TODO: revisit this assertions. - (cb) => { - // note: make sure we can link js plain objects - const content = Buffer.from(JSON.stringify({ - title: 'serialized object' - }, null, 0)) - ipfs.add(content, (err, result) => { - expect(err).to.not.exist() - expect(result).to.exist() - expect(result).to.have.lengthOf(1) - const object = result.pop() - node3 = { - name: object.hash, - multihash: object.hash, - size: object.size - } - cb() - }) - }, - (cb) => { - ipfs.object.patch.addLink(testNodeWithLinkMultihash, node3, (err, node) => { - expect(err).to.not.exist() - expect(node).to.exist() - testNodeWithLinkMultihash = node.multihash - testLinkPlainObject = node3 - cb() - }) - } - */ - ], done) - }) - - it('.rmLink', (done) => { - series([ - (cb) => { - ipfs.object.patch.rmLink(testNodeWithLinkMultihash, testLink, (err, node) => { - expect(err).to.not.exist() - expect(node.multihash).to.not.deep.equal(testNodeWithLinkMultihash) - testNodeWithLinkMultihash = node.multihash - - cb() - }) - } - /* TODO: revisit this assertions. 
- (cb) => { - ipfs.object.patch.rmLink(testNodeWithLinkMultihash, testLinkPlainObject, (err, node) => { - expect(err).to.not.exist() - expect(node.multihash).to.not.deep.equal(testNodeWithLinkMultihash) - cb() - }) - } - */ - ], done) - }) - - it('.appendData', (done) => { - ipfs.object.patch.appendData(testNodeMultihash, Buffer.from('append'), (err, node) => { - expect(err).to.not.exist() - expect(node.multihash).to.not.deep.equal(testNodeMultihash) - done() - }) - }) - - it('.setData', (done) => { - ipfs.object.patch.appendData(testNodeMultihash, Buffer.from('set'), (err, node) => { - expect(err).to.not.exist() - expect(node.multihash).to.not.deep.equal(testNodeMultihash) - done() - }) - }) - }) - }) - - describe('promise API', () => { - it('object.new', () => { - return ipfs.object.new() - .then((node) => { - const nodeJSON = node.toJSON() - expect(nodeJSON.multihash).to.equal('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n') - }) - }) - - it('object.put', () => { - const obj = { - Data: Buffer.from('Some data'), - Links: [] - } - - return ipfs.object.put(obj) - .then((node) => { - const nodeJSON = node.toJSON() - expect(obj.Data).to.deep.equal(nodeJSON.data) - expect(obj.Links).to.deep.equal(nodeJSON.links) - expect(nodeJSON.multihash).to.equal('QmPb5f92FxKPYdT3QNBd1GKiL4tZUXUrzF4Hkpdr3Gf1gK') - }) - }) - - it('object.get', () => { - const testObj = { - Data: Buffer.from('get test object'), - Links: [] - } - - return ipfs.object.put(testObj).then((node1) => { - return ipfs.object.get(node1.multihash).then((node2) => { - // because js-ipfs-api can't infer if the - // returned Data is Buffer or String - if (typeof node2.data === 'string') { - node2.data = Buffer.from(node2.data) - } - - expect(node1.data).to.deep.equal(node2.data) - expect(node1.links).to.deep.equal(node2.links) - }) - }) - }) - - it('object.get multihash string', () => { - return ipfs.object.get('QmPb5f92FxKPYdT3QNBd1GKiL4tZUXUrzF4Hkpdr3Gf1gK').then((node) => { - expect(node.data).to.exist() - 
}) - }) - - it('object.data', () => { - const testObj = { - Data: Buffer.from('get test object'), - Links: [] - } - - return ipfs.object.put(testObj).then((node) => { - return ipfs.object.data(node.multihash).then((data) => { - // because js-ipfs-api can't infer - // if the returned Data is Buffer or String - if (typeof data === 'string') { - data = Buffer.from(data) - } - expect(node.data).to.deep.equal(data) - }) - }) - }) - - it('object.stat', () => { - const testObj = { - Data: Buffer.from('get test object'), - Links: [] - } - - return ipfs.object.put(testObj, (err, node) => { - expect(err).to.not.exist() - - return ipfs.object.stat('QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3Ms2sdJZ', {enc: 'base58'}) - .then((stats) => { - const expected = { - Hash: 'QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3Ms2sdJZ', - NumLinks: 0, - BlockSize: 17, - LinksSize: 2, - DataSize: 15, - CumulativeSize: 17 - } - expect(expected).to.deep.equal(stats) - }) - }) - }) - - it('object.links', () => { - const testObj = { - Data: Buffer.from('get test object'), - Links: [] - } - - return ipfs.object.put(testObj).then((node) => { - return ipfs.object.links(node.multihash).then((links) => { - expect(node.links).to.eql(links) - }) - }) - }) - - describe('object.patch', () => { - let testNodeMultihash - let testNodeWithLinkMultihash - let testLink - - const obj = { - Data: Buffer.from('patch test object'), - Links: [] - } - - before(() => { - return ipfs.object.put(obj) - .then((node) => { - testNodeMultihash = node.multihash - }) - }) - - it('.addLink', () => { - let node1a - let node1b - let node2 - return new Promise((resolve, reject) => { - DAGNode.create(obj.Data, obj.Links, function (err, node) { - if (err) { - return reject(err) - } - return resolve(node) - }) - }).then((node) => { - node1a = node - return new Promise((resolve, reject) => { - DAGNode.create(Buffer.from('some other node'), function (err, node) { - if (err) { - return reject(err) - } - return resolve(node) - }) - 
}).then((node1) => { - node2 = node1 - return ipfs.object.put(node2) - }) - }).then(() => { - const link = node2.toJSON() - link.name = 'link-to-node' - return new Promise((resolve, reject) => { - DAGNode.addLink(node1a, link, function (err, node) { - if (err) { - return reject(err) - } - return resolve(node) - }) - }).then((node) => { - node1b = node - return ipfs.object.patch.addLink(testNodeMultihash, node1b.links[0]) - }) - }).then((node) => { - expect(node1b.multihash).to.eql(node.multihash) - testNodeWithLinkMultihash = node.multihash - testLink = node1b.links[0] - }) - }) - - it('.rmLink', () => { - return ipfs.object.patch.rmLink(testNodeWithLinkMultihash, testLink) - .then((node) => { - expect(node.multihash).to.not.deep.equal(testNodeWithLinkMultihash) - }) - }) - - it('.appendData', () => { - return ipfs.object.patch.appendData(testNodeMultihash, Buffer.from('append')) - .then((node) => { - expect(node.multihash).to.not.deep.equal(testNodeMultihash) - }) - }) - - it('.setData', () => { - return ipfs.object.patch.appendData(testNodeMultihash, Buffer.from('set')) - .then((node) => { - expect(node.multihash).to.not.deep.equal(testNodeMultihash) - }) - }) - }) - }) - }) -} diff --git a/js/src/object/index.js b/js/src/object/index.js new file mode 100644 index 000000000..63fac9706 --- /dev/null +++ b/js/src/object/index.js @@ -0,0 +1,14 @@ +'use strict' +const { createSuite } = require('../utils/suite') + +const tests = { + new: require('./new'), + put: require('./put'), + get: require('./get'), + data: require('./data'), + links: require('./links'), + stat: require('./stat'), + patch: require('./patch') +} + +module.exports = createSuite(tests) diff --git a/js/src/object/new.js b/js/src/object/new.js new file mode 100644 index 000000000..d05f5ac44 --- /dev/null +++ b/js/src/object/new.js @@ -0,0 +1,67 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = 
require('dirty-chai') +const { getDescribe, getIt } = require('../utils/mocha') + +const expect = chai.expect +chai.use(dirtyChai) + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.object.new', function () { + this.timeout(80 * 1000) + + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should create a new object with no template', (done) => { + ipfs.object.new((err, node) => { + expect(err).to.not.exist() + const nodeJSON = node.toJSON() + expect(nodeJSON.multihash).to.equal('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n') + done() + }) + }) + + it('should create a new object with no template (promised)', () => { + return ipfs.object.new() + .then((node) => { + const nodeJSON = node.toJSON() + expect(nodeJSON.multihash).to.equal('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n') + }) + }) + + it('should create a new object with unixfs-dir template', (done) => { + ipfs.object.new('unixfs-dir', (err, node) => { + expect(err).to.not.exist() + const nodeJSON = node.toJSON() + expect(nodeJSON.multihash) + .to.equal('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn') + done() + }) + }) + }) +} diff --git a/js/src/object/patch/add-link.js b/js/src/object/patch/add-link.js new file mode 100644 index 000000000..03cba11fa --- /dev/null +++ b/js/src/object/patch/add-link.js @@ -0,0 +1,186 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const dagPB = require('ipld-dag-pb') +const DAGNode = dagPB.DAGNode +const 
series = require('async/series') +const { getDescribe, getIt } = require('../../utils/mocha') + +const expect = chai.expect +chai.use(dirtyChai) + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.object.patch.addLink', function () { + this.timeout(80 * 1000) + + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should add a link to an existing node', (done) => { + let testNodeMultihash + let node1a + let node1b + let node2 + + const obj = { + Data: Buffer.from('patch test object'), + Links: [] + } + + series([ + (cb) => { + ipfs.object.put(obj, (err, node) => { + expect(err).to.not.exist() + testNodeMultihash = node.multihash + cb() + }) + }, + (cb) => { + DAGNode.create(obj.Data, obj.Links, (err, node) => { + expect(err).to.not.exist() + node1a = node + cb() + }) + }, + (cb) => { + DAGNode.create(Buffer.from('some other node'), (err, node) => { + expect(err).to.not.exist() + node2 = node + cb() + }) + }, + (cb) => { + // note: we need to put the linked obj, otherwise IPFS won't + // timeout. Reason: it needs the node to get its size + ipfs.object.put(node2, cb) + }, + (cb) => { + const link = node2.toJSON() + link.name = 'link-to-node' + DAGNode.addLink(node1a, link, (err, node) => { + expect(err).to.not.exist() + node1b = node + cb() + }) + }, + (cb) => { + ipfs.object.patch.addLink(testNodeMultihash, node1b.links[0], (err, node) => { + expect(err).to.not.exist() + expect(node1b.multihash).to.eql(node.multihash) + cb() + }) + } + /* TODO: revisit this assertions. 
+ (cb) => { + // note: make sure we can link js plain objects + const content = Buffer.from(JSON.stringify({ + title: 'serialized object' + }, null, 0)) + ipfs.add(content, (err, result) => { + expect(err).to.not.exist() + expect(result).to.exist() + expect(result).to.have.lengthOf(1) + const object = result.pop() + node3 = { + name: object.hash, + multihash: object.hash, + size: object.size + } + cb() + }) + }, + (cb) => { + ipfs.object.patch.addLink(testNodeWithLinkMultihash, node3, (err, node) => { + expect(err).to.not.exist() + expect(node).to.exist() + testNodeWithLinkMultihash = node.multihash + testLinkPlainObject = node3 + cb() + }) + } + */ + ], done) + }) + + it('should add a link to an existing node (promised)', () => { + let testNodeMultihash + let node1a + let node1b + let node2 + + const obj = { + Data: Buffer.from('patch test object (promised)'), + Links: [] + } + + return ipfs.object.put(obj) + .then((node) => { + testNodeMultihash = node.multihash + }) + .then(() => new Promise((resolve, reject) => { + DAGNode.create(obj.Data, obj.Links, function (err, node) { + if (err) { + return reject(err) + } + return resolve(node) + }) + })) + .then((node) => { + node1a = node + return new Promise((resolve, reject) => { + DAGNode.create(Buffer.from('some other node'), function (err, node) { + if (err) { + return reject(err) + } + return resolve(node) + }) + }).then((node1) => { + node2 = node1 + return ipfs.object.put(node2) + }) + }) + .then(() => { + const link = node2.toJSON() + link.name = 'link-to-node' + return new Promise((resolve, reject) => { + DAGNode.addLink(node1a, link, function (err, node) { + if (err) { + return reject(err) + } + return resolve(node) + }) + }).then((node) => { + node1b = node + return ipfs.object.patch.addLink(testNodeMultihash, node1b.links[0]) + }) + }) + .then((node) => { + expect(node1b.multihash).to.eql(node.multihash) + }) + }) + }) +} diff --git a/js/src/object/patch/append-data.js b/js/src/object/patch/append-data.js 
new file mode 100644 index 000000000..4ee678740 --- /dev/null +++ b/js/src/object/patch/append-data.js @@ -0,0 +1,73 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const { getDescribe, getIt } = require('../../utils/mocha') + +const expect = chai.expect +chai.use(dirtyChai) + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.object.patch.appendData', function () { + this.timeout(80 * 1000) + + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should append data to an existing node', (done) => { + const obj = { + Data: Buffer.from('patch test object'), + Links: [] + } + + ipfs.object.put(obj, (err, node) => { + expect(err).to.not.exist() + + ipfs.object.patch.appendData(node.multihash, Buffer.from('append'), (err, patchedNode) => { + expect(err).to.not.exist() + expect(patchedNode.multihash).to.not.deep.equal(node.multihash) + done() + }) + }) + }) + + it('should append data to an existing node (promised)', () => { + const obj = { + Data: Buffer.from('patch test object (promised)'), + Links: [] + } + + return ipfs.object.put(obj) + .then((node) => { + return ipfs.object.patch.appendData(node.multihash, Buffer.from('append')) + .then((patchedNode) => ({ patchedNode, node })) + }) + .then(({ patchedNode, node }) => { + expect(patchedNode.multihash).to.not.deep.equal(node.multihash) + }) + }) + }) +} diff --git a/js/src/object/patch/index.js b/js/src/object/patch/index.js new file mode 100644 index 
000000000..d84efe199 --- /dev/null +++ b/js/src/object/patch/index.js @@ -0,0 +1,11 @@ +'use strict' +const { createSuite } = require('../../utils/suite') + +const tests = { + addLink: require('./add-link'), + rmLink: require('./rm-link'), + appendData: require('./append-data'), + setData: require('./set-data') +} + +module.exports = createSuite(tests, 'patch') diff --git a/js/src/object/patch/rm-link.js b/js/src/object/patch/rm-link.js new file mode 100644 index 000000000..827175817 --- /dev/null +++ b/js/src/object/patch/rm-link.js @@ -0,0 +1,131 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const dagPB = require('ipld-dag-pb') +const DAGLink = dagPB.DAGLink +const series = require('async/series') +const { getDescribe, getIt } = require('../../utils/mocha') + +const expect = chai.expect +chai.use(dirtyChai) + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.object.patch.rmLink', function () { + this.timeout(80 * 1000) + + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should remove a link from an existing node', (done) => { + let node1a + let node1b + let node2 + let testLink + + const obj1 = { + Data: Buffer.from('patch test object 1'), + Links: [] + } + + const obj2 = { + Data: Buffer.from('patch test object 2'), + Links: [] + } + + series([ + (cb) => { + ipfs.object.put(obj1, (err, node) => { + expect(err).to.not.exist() + node1a = node + cb() + }) + }, + (cb) => { + ipfs.object.put(obj2, 
(err, node) => { + expect(err).to.not.exist() + node2 = node + cb() + }) + }, + (cb) => { + testLink = new DAGLink('link-to-node', node2.size, node2.multihash) + + ipfs.object.patch.addLink(node1a.multihash, testLink, (err, node) => { + expect(err).to.not.exist() + node1b = node + cb() + }) + }, + (cb) => { + ipfs.object.patch.rmLink(node1b.multihash, testLink, (err, node) => { + expect(err).to.not.exist() + expect(node.multihash).to.not.deep.equal(node1b.multihash) + cb() + }) + } + /* TODO: revisit this assertions. + (cb) => { + ipfs.object.patch.rmLink(testNodeWithLinkMultihash, testLinkPlainObject, (err, node) => { + expect(err).to.not.exist() + expect(node.multihash).to.not.deep.equal(testNodeWithLinkMultihash) + cb() + }) + } + */ + ], done) + }) + + it('should remove a link from an existing node (promised)', () => { + let node1a + let node1b + let node2 + let testLink + + const obj1 = { + Data: Buffer.from('patch test object 1'), + Links: [] + } + + const obj2 = { + Data: Buffer.from('patch test object 2'), + Links: [] + } + + return ipfs.object.put(obj1) + .then((node) => { node1a = node }) + .then(() => ipfs.object.put(obj2)) + .then((node) => { node2 = node }) + .then(() => { + testLink = new DAGLink('link-to-node', node2.size, node2.multihash) + return ipfs.object.patch.addLink(node1a.multihash, testLink) + }) + .then((node) => { node1b = node }) + .then(() => ipfs.object.patch.rmLink(node1b.multihash, testLink)) + .then((node) => expect(node.multihash).to.not.deep.equal(node1b.multihash)) + }) + }) +} diff --git a/js/src/object/patch/set-data.js b/js/src/object/patch/set-data.js new file mode 100644 index 000000000..ef9e73c54 --- /dev/null +++ b/js/src/object/patch/set-data.js @@ -0,0 +1,73 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const { getDescribe, getIt } = require('../../utils/mocha') + +const expect = chai.expect 
+chai.use(dirtyChai) + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.object.patch.setData', function () { + this.timeout(80 * 1000) + + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should set data for an existing node', (done) => { + const obj = { + Data: Buffer.from('patch test object'), + Links: [] + } + + ipfs.object.put(obj, (err, node) => { + expect(err).to.not.exist() + + ipfs.object.patch.setData(node.multihash, Buffer.from('set'), (err, patchedNode) => { + expect(err).to.not.exist() + expect(node.multihash).to.not.deep.equal(patchedNode.multihash) + done() + }) + }) + }) + + it('should set data for an existing node (promised)', () => { + const obj = { + Data: Buffer.from('patch test object (promised)'), + Links: [] + } + + return ipfs.object.put(obj) + .then((node) => { + return ipfs.object.patch.setData(node.multihash, Buffer.from('set')) + .then((patchedNode) => ({ patchedNode, node })) + }) + .then(({ patchedNode, node }) => { + expect(node.multihash).to.not.deep.equal(patchedNode.multihash) + }) + }) + }) +} diff --git a/js/src/utils/suite.js b/js/src/utils/suite.js index 92db4e4af..4d8b1ca5f 100644 --- a/js/src/utils/suite.js +++ b/js/src/utils/suite.js @@ -1,16 +1,17 @@ -function createSuite (tests) { +function createSuite (tests, parent) { const suite = (createCommon, options) => { Object.keys(tests).forEach(t => { const opts = Object.assign({}, options) + const suiteName = parent ? 
`${parent}.${t}` : t if (Array.isArray(opts.skip)) { - if (opts.skip.includes(t)) { + if (opts.skip.includes(suiteName)) { opts.skip = true } } if (Array.isArray(opts.only)) { - if (opts.only.includes(t)) { + if (opts.only.includes(suiteName)) { opts.only = true } } From 93f51dc408fa42cea793dbe657a807971e5610e3 Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Thu, 7 Jun 2018 23:12:21 +0100 Subject: [PATCH 21/41] feat: modularise object.data,get,links,put and stat License: MIT Signed-off-by: Alan Shaw --- README.md | 10 +- js/src/object/data.js | 129 ++++++++++++++++++ js/src/object/get.js | 296 +++++++++++++++++++++++++++++++++++++++++ js/src/object/links.js | 151 +++++++++++++++++++++ js/src/object/put.js | 201 ++++++++++++++++++++++++++++ js/src/object/stat.js | 194 +++++++++++++++++++++++++++ 6 files changed, 979 insertions(+), 2 deletions(-) create mode 100644 js/src/object/data.js create mode 100644 js/src/object/get.js create mode 100644 js/src/object/links.js create mode 100644 js/src/object/put.js create mode 100644 js/src/object/stat.js diff --git a/README.md b/README.md index 165d246d0..57ad20721 100644 --- a/README.md +++ b/README.md @@ -107,7 +107,10 @@ tests.repo.gc(createCommon, { skip: true }) // pass an options object to skip th // OR, at the subsystem level -tests.repo(createCommon, { skip: ['gc'] }) // skips ALL the repo.gc tests +// skips ALL the repo.gc tests +tests.repo(createCommon, { skip: ['gc'] }) +// skips ALL the object.patch.addLink tests +tests.object(createCommon, { skip: ['patch.addLink'] }) ``` ##### Skipping specific tests @@ -127,7 +130,10 @@ tests.repo.gc(createCommon, { only: true }) // pass an options object to run onl // OR, at the subsystem level -tests.repo(createCommon, { only: ['gc'] }) // runs only ALL the repo.gc tests +// runs only ALL the repo.gc tests +tests.repo(createCommon, { only: ['gc'] }) +// runs only ALL the object.patch.addLink tests +tests.object(createCommon, { only: ['patch.addLink'] }) ``` ##### Running only 
specific tests diff --git a/js/src/object/data.js b/js/src/object/data.js new file mode 100644 index 000000000..c1ea8e45b --- /dev/null +++ b/js/src/object/data.js @@ -0,0 +1,129 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const bs58 = require('bs58') +const hat = require('hat') +const { getDescribe, getIt } = require('../utils/mocha') + +const expect = chai.expect +chai.use(dirtyChai) + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.object.data', function () { + this.timeout(80 * 1000) + + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should get data by multihash', (done) => { + const testObj = { + Data: Buffer.from(hat()), + Links: [] + } + + ipfs.object.put(testObj, (err, node) => { + expect(err).to.not.exist() + + ipfs.object.data(node.multihash, (err, data) => { + expect(err).to.not.exist() + + // because js-ipfs-api can't infer + // if the returned Data is Buffer or String + if (typeof data === 'string') { + data = Buffer.from(data) + } + expect(node.data).to.eql(data) + done() + }) + }) + }) + + it('should get data by multihash (promised)', () => { + const testObj = { + Data: Buffer.from(hat()), + Links: [] + } + + return ipfs.object.put(testObj).then((node) => { + return ipfs.object.data(node.multihash).then((data) => { + // because js-ipfs-api can't infer + // if the returned Data is Buffer or String + if (typeof data === 'string') { + data = Buffer.from(data) + } + 
expect(node.data).to.deep.equal(data) + }) + }) + }) + + it('should get data by base58 encoded multihash', (done) => { + const testObj = { + Data: Buffer.from(hat()), + Links: [] + } + + ipfs.object.put(testObj, (err, node) => { + expect(err).to.not.exist() + + ipfs.object.data(bs58.encode(node.multihash), { enc: 'base58' }, (err, data) => { + expect(err).to.not.exist() + + // because js-ipfs-api can't infer + // if the returned Data is Buffer or String + if (typeof data === 'string') { + data = Buffer.from(data) + } + expect(node.data).to.eql(data) + done() + }) + }) + }) + + it('should get data by base58 encoded multihash string', (done) => { + const testObj = { + Data: Buffer.from(hat()), + Links: [] + } + + ipfs.object.put(testObj, (err, node) => { + expect(err).to.not.exist() + + ipfs.object.data(bs58.encode(node.multihash).toString(), { enc: 'base58' }, (err, data) => { + expect(err).to.not.exist() + + // because js-ipfs-api can't infer if the + // returned Data is Buffer or String + if (typeof data === 'string') { + data = Buffer.from(data) + } + expect(node.data).to.eql(data) + done() + }) + }) + }) + }) +} diff --git a/js/src/object/get.js b/js/src/object/get.js new file mode 100644 index 000000000..c75f6d123 --- /dev/null +++ b/js/src/object/get.js @@ -0,0 +1,296 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const dagPB = require('ipld-dag-pb') +const DAGNode = dagPB.DAGNode +const bs58 = require('bs58') +const series = require('async/series') +const hat = require('hat') +const { getDescribe, getIt } = require('../utils/mocha') + +const expect = chai.expect +chai.use(dirtyChai) + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.object.get', function () { + this.timeout(80 * 1000) + + let ipfs + + before(function (done) { + // 
CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should get object by multihash', (done) => { + const obj = { + Data: Buffer.from(hat()), + Links: [] + } + + let node1 + let node2 + + series([ + (cb) => { + ipfs.object.put(obj, (err, node) => { + expect(err).to.not.exist() + node1 = node + cb() + }) + }, + (cb) => { + ipfs.object.get(node1.multihash, (err, node) => { + expect(err).to.not.exist() + node2 = node + + // because js-ipfs-api can't infer if the + // returned Data is Buffer or String + if (typeof node2.data === 'string') { + node2.data = Buffer.from(node2.data) + } + cb() + }) + }, + (cb) => { + expect(node1.data).to.eql(node2.data) + expect(node1.links).to.eql(node2.links) + expect(node1.multihash).to.eql(node2.multihash) + cb() + } + ], done) + }) + + it('should get object by multihash (promised)', () => { + const testObj = { + Data: Buffer.from(hat()), + Links: [] + } + + return ipfs.object.put(testObj).then((node1) => { + return ipfs.object.get(node1.multihash).then((node2) => { + // because js-ipfs-api can't infer if the + // returned Data is Buffer or String + if (typeof node2.data === 'string') { + node2.data = Buffer.from(node2.data) + } + + expect(node1.data).to.deep.equal(node2.data) + expect(node1.links).to.deep.equal(node2.links) + }) + }) + }) + + it('should get object by multihash string', (done) => { + const obj = { + Data: Buffer.from(hat()), + Links: [] + } + + let node1 + let node2 + + series([ + (cb) => { + ipfs.object.put(obj, (err, node) => { + expect(err).to.not.exist() + node1 = node + cb() + }) + }, + (cb) => { + // get object from ipfs multihash string + ipfs.object.get(node1.toJSON().multihash, (err, node) => { + 
expect(err).to.not.exist() + // because js-ipfs-api can't infer if the + // returned Data is Buffer or String + if (typeof node.data === 'string') { + node.data = Buffer.from(node.data) + } + node2 = node + cb() + }) + }, + (cb) => { + expect(node1.data).to.eql(node2.data) + expect(node1.links).to.eql(node2.links) + expect(node1.multihash).to.eql(node2.multihash) + cb() + } + ], done) + }) + + it('should get object by multihash string (promised)', () => { + const obj = { + Data: Buffer.from(hat()), + Links: [] + } + + return ipfs.object.put(obj) + .then((node1) => { + return ipfs.object.get(node1.toJSON().multihash) + .then((node2) => { + // because js-ipfs-api can't infer if the + // returned Data is Buffer or String + if (typeof node2.data === 'string') { + node2.data = Buffer.from(node2.data) + } + + expect(node1.data).to.deep.equal(node2.data) + expect(node1.links).to.deep.equal(node2.links) + }) + }) + }) + + it('should get object with links by multihash string', (done) => { + let node1a + let node1b + let node1c + let node2 + + series([ + (cb) => { + DAGNode.create(Buffer.from('Some data 1'), (err, node) => { + expect(err).to.not.exist() + node1a = node + cb() + }) + }, + (cb) => { + DAGNode.create(Buffer.from('Some data 2'), (err, node) => { + expect(err).to.not.exist() + node2 = node + cb() + }) + }, + (cb) => { + const link = node2.toJSON() + link.name = 'some-link' + DAGNode.addLink(node1a, link, (err, node) => { + expect(err).to.not.exist() + node1b = node + cb() + }) + }, + (cb) => { + ipfs.object.put(node1b, cb) + }, + (cb) => { + ipfs.object.get(node1b.multihash, (err, node) => { + expect(err).to.not.exist() + + // because js-ipfs-api can't infer if the + // returned Data is Buffer or String + if (typeof node.data === 'string') { + node.data = Buffer.from(node.data) + } + + node1c = node + cb() + }) + }, + (cb) => { + expect(node1a.data).to.eql(node1c.data) + expect(node1b.multihash).to.eql(node1c.multihash) + cb() + } + ], done) + }) + + it('should 
get object by base58 encoded multihash', (done) => { + const obj = { + Data: Buffer.from(hat()), + Links: [] + } + + let node1a + let node1b + + series([ + (cb) => { + ipfs.object.put(obj, (err, node) => { + expect(err).to.not.exist() + node1a = node + cb() + }) + }, + (cb) => { + ipfs.object.get(node1a.multihash, { enc: 'base58' }, (err, node) => { + expect(err).to.not.exist() + // because js-ipfs-api can't infer if the + // returned Data is Buffer or String + if (typeof node.data === 'string') { + node.data = Buffer.from(node.data) + } + node1b = node + cb() + }) + }, + (cb) => { + expect(node1a.multihash).to.eql(node1b.multihash) + expect(node1a.data).to.eql(node1b.data) + expect(node1a.links).to.eql(node1b.links) + cb() + } + ], done) + }) + + it('should get object by base58 encoded multihash string', (done) => { + const obj = { + Data: Buffer.from(hat()), + Links: [] + } + + let node1a + let node1b + + series([ + (cb) => { + ipfs.object.put(obj, (err, node) => { + expect(err).to.not.exist() + node1a = node + cb() + }) + }, + (cb) => { + ipfs.object.get(bs58.encode(node1a.multihash).toString(), { enc: 'base58' }, (err, node) => { + expect(err).to.not.exist() + // because js-ipfs-api can't infer if the + // returned Data is Buffer or String + if (typeof node.data === 'string') { + node.data = Buffer.from(node.data) + } + node1b = node + cb() + }) + }, + (cb) => { + expect(node1a.multihash).to.eql(node1b.multihash) + expect(node1a.data).to.eql(node1b.data) + expect(node1a.links).to.eql(node1b.links) + cb() + } + ], done) + }) + }) +} diff --git a/js/src/object/links.js b/js/src/object/links.js new file mode 100644 index 000000000..beddb2abd --- /dev/null +++ b/js/src/object/links.js @@ -0,0 +1,151 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const dagPB = require('ipld-dag-pb') +const DAGNode = dagPB.DAGNode +const bs58 = require('bs58') +const 
series = require('async/series') +const hat = require('hat') +const { getDescribe, getIt } = require('../utils/mocha') + +const expect = chai.expect +chai.use(dirtyChai) + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.object.links', function () { + this.timeout(80 * 1000) + + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should get empty links by multihash', (done) => { + const testObj = { + Data: Buffer.from(hat()), + Links: [] + } + + ipfs.object.put(testObj, (err, node) => { + expect(err).to.not.exist() + + ipfs.object.links(node.multihash, (err, links) => { + expect(err).to.not.exist() + expect(node.links).to.deep.equal(links) + done() + }) + }) + }) + + it('should get empty links by multihash (promised)', () => { + const testObj = { + Data: Buffer.from(hat()), + Links: [] + } + + return ipfs.object.put(testObj).then((node) => { + return ipfs.object.links(node.multihash).then((links) => { + expect(node.links).to.eql(links) + }) + }) + }) + + it('should get links by multihash', (done) => { + let node1a + let node1b + let node2 + + series([ + (cb) => { + DAGNode.create(Buffer.from('Some data 1'), (err, node) => { + expect(err).to.not.exist() + node1a = node + cb() + }) + }, + (cb) => { + DAGNode.create(Buffer.from('Some data 2'), (err, node) => { + expect(err).to.not.exist() + node2 = node + cb() + }) + }, + (cb) => { + const link = node2.toJSON() + link.name = 'some-link' + + DAGNode.addLink(node1a, link, (err, node) => { + expect(err).to.not.exist() + node1b = node + cb() + }) + }, + (cb) => { + 
ipfs.object.put(node1b, cb) + }, + (cb) => { + ipfs.object.links(node1b.multihash, (err, links) => { + expect(err).to.not.exist() + expect(node1b.links[0].toJSON()).to.eql(links[0].toJSON()) + cb() + }) + } + ], done) + }) + + it('should get links by base58 encoded multihash', (done) => { + const testObj = { + Data: Buffer.from(hat()), + Links: [] + } + + ipfs.object.put(testObj, (err, node) => { + expect(err).to.not.exist() + + ipfs.object.links(bs58.encode(node.multihash), { enc: 'base58' }, (err, links) => { + expect(err).to.not.exist() + expect(node.links).to.deep.equal(links) + done() + }) + }) + }) + + it('should get links by base58 encoded multihash string', (done) => { + const testObj = { + Data: Buffer.from(hat()), + Links: [] + } + + ipfs.object.put(testObj, (err, node) => { + expect(err).to.not.exist() + ipfs.object.links(bs58.encode(node.multihash), { enc: 'base58' }, (err, links) => { + expect(err).to.not.exist() + expect(node.links).to.deep.equal(links) + done() + }) + }) + }) + }) +} diff --git a/js/src/object/put.js b/js/src/object/put.js new file mode 100644 index 000000000..4391425a0 --- /dev/null +++ b/js/src/object/put.js @@ -0,0 +1,201 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const dagPB = require('ipld-dag-pb') +const DAGNode = dagPB.DAGNode +const series = require('async/series') +const hat = require('hat') +const { getDescribe, getIt } = require('../utils/mocha') + +const expect = chai.expect +chai.use(dirtyChai) + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.object.put', function () { + this.timeout(80 * 1000) + + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + 
common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should put an object', (done) => { + const obj = { + Data: Buffer.from(hat()), + Links: [] + } + + ipfs.object.put(obj, (err, node) => { + expect(err).to.not.exist() + const nodeJSON = node.toJSON() + expect(nodeJSON.data).to.eql(obj.Data) + expect(nodeJSON.links).to.eql(obj.Links) + expect(nodeJSON.multihash).to.exist() + done() + }) + }) + + it('should put an object (promised)', () => { + const obj = { + Data: Buffer.from(hat()), + Links: [] + } + + return ipfs.object.put(obj) + .then((node) => { + const nodeJSON = node.toJSON() + expect(obj.Data).to.deep.equal(nodeJSON.data) + expect(obj.Links).to.deep.equal(nodeJSON.links) + expect(nodeJSON.multihash).to.exist() + }) + }) + + it('should put a JSON encoded Buffer', (done) => { + const obj = { + Data: Buffer.from(hat()), + Links: [] + } + + const obj2 = { + Data: obj.Data.toString(), + Links: obj.Links + } + + const buf = Buffer.from(JSON.stringify(obj2)) + + ipfs.object.put(buf, { enc: 'json' }, (err, node) => { + expect(err).to.not.exist() + const nodeJSON = node.toJSON() + + expect(nodeJSON.data).to.eql(node.data) + expect(nodeJSON.multihash).to.exist() + done() + }) + }) + + it('should put a Protobuf encoded Buffer', (done) => { + let node + let serialized + + series([ + (cb) => { + DAGNode.create(Buffer.from(hat()), (err, _node) => { + expect(err).to.not.exist() + node = _node + cb() + }) + }, + (cb) => { + dagPB.util.serialize(node, (err, _serialized) => { + expect(err).to.not.exist() + serialized = _serialized + cb() + }) + }, + (cb) => { + ipfs.object.put(serialized, { enc: 'protobuf' }, (err, storedNode) => { + expect(err).to.not.exist() + expect(node.data).to.deep.equal(node.data) + expect(node.links).to.deep.equal(node.links) + expect(node.multihash).to.eql(storedNode.multihash) + cb() + 
}) + } + ], done) + }) + + it('should put a Buffer as data', (done) => { + const data = Buffer.from(hat()) + ipfs.object.put(data, (err, node) => { + expect(err).to.not.exist() + const nodeJSON = node.toJSON() + expect(data).to.deep.equal(nodeJSON.data) + expect([]).to.deep.equal(nodeJSON.links) + expect(nodeJSON.multihash).to.exist() + done() + }) + }) + + it('should put a Protobuf DAGNode', (done) => { + DAGNode.create(Buffer.from(hat()), (err, dNode) => { + expect(err).to.not.exist() + ipfs.object.put(dNode, (err, node) => { + expect(err).to.not.exist() + expect(dNode.data).to.deep.equal(node.data) + expect(dNode.links).to.deep.equal(node.links) + done() + }) + }) + }) + + it('should fail if a string is passed', (done) => { + ipfs.object.put(hat(), (err) => { + expect(err).to.exist() + done() + }) + }) + + it('should put a Protobuf DAGNode with a link', (done) => { + let node1a + let node1b + let node2 + + series([ + (cb) => { + DAGNode.create(Buffer.from(hat()), (err, node) => { + expect(err).to.not.exist() + node1a = node + cb() + }) + }, + (cb) => { + DAGNode.create(Buffer.from(hat()), (err, node) => { + expect(err).to.not.exist() + node2 = node + cb() + }) + }, + (cb) => { + const link = node2.toJSON() + link.name = 'some-link' + DAGNode.addLink(node1a, link, (err, node) => { + expect(err).to.not.exist() + node1b = node + cb() + }) + }, + (cb) => { + ipfs.object.put(node1b, (err, node) => { + expect(err).to.not.exist() + expect(node1b.data).to.deep.equal(node.data) + expect(node1b.links.map((l) => l.toJSON())) + .to.deep.equal(node.links.map((l) => l.toJSON())) + cb() + }) + } + ], done) + }) + }) +} diff --git a/js/src/object/stat.js b/js/src/object/stat.js new file mode 100644 index 000000000..af266ac52 --- /dev/null +++ b/js/src/object/stat.js @@ -0,0 +1,194 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ + +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const dagPB = 
require('ipld-dag-pb') +const DAGNode = dagPB.DAGNode +const bs58 = require('bs58') +const series = require('async/series') +const { getDescribe, getIt } = require('../utils/mocha') + +const expect = chai.expect +chai.use(dirtyChai) + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.object.stat', function () { + this.timeout(80 * 1000) + + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should get stats by multihash', (done) => { + const testObj = { + Data: Buffer.from('get test object'), + Links: [] + } + + ipfs.object.put(testObj, (err, node) => { + expect(err).to.not.exist() + + ipfs.object.stat(node.multihash, (err, stats) => { + expect(err).to.not.exist() + const expected = { + Hash: 'QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3Ms2sdJZ', + NumLinks: 0, + BlockSize: 17, + LinksSize: 2, + DataSize: 15, + CumulativeSize: 17 + } + expect(expected).to.deep.equal(stats) + done() + }) + }) + }) + + it('should get stats for object by multihash (promised)', () => { + const testObj = { + Data: Buffer.from('get test object'), + Links: [] + } + + return ipfs.object.put(testObj, (err, node) => { + expect(err).to.not.exist() + + return ipfs.object.stat('QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3Ms2sdJZ', {enc: 'base58'}) + .then((stats) => { + const expected = { + Hash: 'QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3Ms2sdJZ', + NumLinks: 0, + BlockSize: 17, + LinksSize: 2, + DataSize: 15, + CumulativeSize: 17 + } + expect(expected).to.deep.equal(stats) + }) + }) + }) + + it('should get stats for object with links by multihash', (done) => { 
+ let node1a + let node1b + let node2 + + series([ + (cb) => { + DAGNode.create(Buffer.from('Some data 1'), (err, node) => { + expect(err).to.not.exist() + node1a = node + cb() + }) + }, + (cb) => { + DAGNode.create(Buffer.from('Some data 2'), (err, node) => { + expect(err).to.not.exist() + node2 = node + cb() + }) + }, + (cb) => { + const link = node2.toJSON() + link.name = 'some-link' + + DAGNode.addLink(node1a, link, (err, node) => { + expect(err).to.not.exist() + node1b = node + cb() + }) + }, + (cb) => { + ipfs.object.put(node1b, cb) + }, + (cb) => { + ipfs.object.stat(node1b.multihash, (err, stats) => { + expect(err).to.not.exist() + const expected = { + Hash: 'QmPR7W4kaADkAo4GKEVVPQN81EDUFCHJtqejQZ5dEG7pBC', + NumLinks: 1, + BlockSize: 64, + LinksSize: 53, + DataSize: 11, + CumulativeSize: 77 + } + expect(expected).to.eql(stats) + cb() + }) + } + ], done) + }) + + it('should get stats by base58 encoded multihash', (done) => { + const testObj = { + Data: Buffer.from('get test object'), + Links: [] + } + + ipfs.object.put(testObj, (err, node) => { + expect(err).to.not.exist() + + ipfs.object.stat(bs58.encode(node.multihash), { enc: 'base58' }, (err, stats) => { + expect(err).to.not.exist() + const expected = { + Hash: 'QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3Ms2sdJZ', + NumLinks: 0, + BlockSize: 17, + LinksSize: 2, + DataSize: 15, + CumulativeSize: 17 + } + expect(expected).to.deep.equal(stats) + done() + }) + }) + }) + + it('should get stats by base58 encoded multihash string', (done) => { + const testObj = { + Data: Buffer.from('get test object'), + Links: [] + } + + ipfs.object.put(testObj, (err, node) => { + expect(err).to.not.exist() + + ipfs.object.stat(bs58.encode(node.multihash).toString(), { enc: 'base58' }, (err, stats) => { + expect(err).to.not.exist() + const expected = { + Hash: 'QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3Ms2sdJZ', + NumLinks: 0, + BlockSize: 17, + LinksSize: 2, + DataSize: 15, + CumulativeSize: 17 + } + 
expect(expected).to.deep.equal(stats) + done() + }) + }) + }) + }) +} From 96cf49aecd08dd144dc9bd79ae2432801f1132ac Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Fri, 8 Jun 2018 00:22:23 +0100 Subject: [PATCH 22/41] feat(pin): modularises pin License: MIT Signed-off-by: Alan Shaw --- js/src/pin/add.js | 65 ++++++++++++++++++++ js/src/pin/index.js | 10 +++ js/src/pin/ls.js | 147 ++++++++++++++++++++++++++++++++++++++++++++ js/src/pin/rm.js | 89 +++++++++++++++++++++++++++ js/src/pin/utils.js | 13 ++++ 5 files changed, 324 insertions(+) create mode 100644 js/src/pin/add.js create mode 100644 js/src/pin/index.js create mode 100644 js/src/pin/ls.js create mode 100644 js/src/pin/rm.js create mode 100644 js/src/pin/utils.js diff --git a/js/src/pin/add.js b/js/src/pin/add.js new file mode 100644 index 000000000..5a1851880 --- /dev/null +++ b/js/src/pin/add.js @@ -0,0 +1,65 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const each = require('async/each') +const { fixtures } = require('./utils') +const { getDescribe, getIt } = require('../utils/mocha') + +const expect = chai.expect +chai.use(dirtyChai) + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.pin.add', function () { + this.timeout(50 * 1000) + + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + populate() + }) + }) + + function populate () { + each(fixtures.files, (file, cb) => { + ipfs.files.add(file.data, { pin: false }, cb) + }, done) + } + }) + + after((done) => common.teardown(done)) + + it('should add a pin', (done) => { + ipfs.pin.add(fixtures.files[0].cid, { 
recursive: false }, (err, pinset) => { + expect(err).to.not.exist() + expect(pinset).to.deep.include({ + hash: fixtures.files[0].cid + }) + done() + }) + }) + + it('should add a pin (promised)', () => { + return ipfs.pin.add(fixtures.files[1].cid, { recursive: false }) + .then((pinset) => { + expect(pinset).to.deep.include({ + hash: fixtures.files[1].cid + }) + }) + }) + }) +} diff --git a/js/src/pin/index.js b/js/src/pin/index.js new file mode 100644 index 000000000..b3723d07c --- /dev/null +++ b/js/src/pin/index.js @@ -0,0 +1,10 @@ +'use strict' +const { createSuite } = require('../utils/suite') + +const tests = { + ls: require('./ls'), + rm: require('./rm'), + add: require('./add') +} + +module.exports = createSuite(tests) diff --git a/js/src/pin/ls.js b/js/src/pin/ls.js new file mode 100644 index 000000000..9251b9f9c --- /dev/null +++ b/js/src/pin/ls.js @@ -0,0 +1,147 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const parallel = require('async/parallel') +const { fixtures } = require('./utils') +const { getDescribe, getIt } = require('../utils/mocha') + +const expect = chai.expect +chai.use(dirtyChai) + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.pin.ls', function () { + this.timeout(50 * 1000) + + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + populate() + }) + }) + + function populate () { + parallel([ + (cb) => { + ipfs.files.add(fixtures.files[0].data, { pin: false }, (err, res) => { + if (err) return cb(err) + ipfs.pin.add(fixtures.files[0].cid, { recursive: true }, cb) + }) + }, + (cb) => { + 
ipfs.files.add(fixtures.files[1].data, { pin: false }, (err, res) => { + if (err) return cb(err) + ipfs.pin.add(fixtures.files[1].cid, { recursive: false }, cb) + }) + } + ], done) + } + }) + + after((done) => common.teardown(done)) + + // 1st, because ipfs.files.add pins automatically + it('should list recursive pins', (done) => { + ipfs.pin.ls({ type: 'recursive' }, (err, pinset) => { + expect(err).to.not.exist() + expect(pinset).to.deep.include({ + type: 'recursive', + hash: fixtures.files[0].cid + }) + done() + }) + }) + + it('should list indirect pins', (done) => { + ipfs.pin.ls({ type: 'indirect' }, (err, pinset) => { + expect(err).to.not.exist() + // because the pinned files have no links + expect(pinset).to.not.deep.include({ + type: 'recursive', + hash: fixtures.files[0].cid + }) + expect(pinset).to.not.deep.include({ + type: 'direct', + hash: fixtures.files[1].cid + }) + done() + }) + }) + + it('should list pins', (done) => { + ipfs.pin.ls((err, pinset) => { + expect(err).to.not.exist() + expect(pinset).to.not.be.empty() + expect(pinset).to.deep.include({ + type: 'recursive', + hash: fixtures.files[0].cid + }) + expect(pinset).to.deep.include({ + type: 'direct', + hash: fixtures.files[1].cid + }) + done() + }) + }) + + it('should list pins (promised)', () => { + return ipfs.pin.ls() + .then((pinset) => { + expect(pinset).to.deep.include({ + type: 'recursive', + hash: fixtures.files[0].cid + }) + expect(pinset).to.deep.include({ + type: 'direct', + hash: fixtures.files[1].cid + }) + }) + }) + + it('should list direct pins', (done) => { + ipfs.pin.ls({ type: 'direct' }, (err, pinset) => { + expect(err).to.not.exist() + expect(pinset).to.deep.include({ + type: 'direct', + hash: fixtures.files[1].cid + }) + done() + }) + }) + + it('should list pins for a specific hash', (done) => { + ipfs.pin.ls(fixtures.files[0].cid, (err, pinset) => { + expect(err).to.not.exist() + expect(pinset).to.deep.equal([{ + type: 'recursive', + hash: fixtures.files[0].cid + }]) + 
done() + }) + }) + + it('should list pins for a specific hash (promised)', () => { + return ipfs.pin.ls(fixtures.files[0].cid) + .then((pinset) => { + expect(pinset).to.deep.equal([{ + type: 'recursive', + hash: fixtures.files[0].cid + }]) + }) + }) + }) +} diff --git a/js/src/pin/rm.js b/js/src/pin/rm.js new file mode 100644 index 000000000..55565bb25 --- /dev/null +++ b/js/src/pin/rm.js @@ -0,0 +1,89 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const parallel = require('async/parallel') +const { fixtures } = require('./utils') +const { getDescribe, getIt } = require('../utils/mocha') + +const expect = chai.expect +chai.use(dirtyChai) + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.pin', function () { + this.timeout(50 * 1000) + + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + populate() + }) + }) + + function populate () { + parallel([ + (cb) => { + ipfs.files.add(fixtures.files[0].data, { pin: false }, (err, res) => { + if (err) return cb(err) + ipfs.pin.add(fixtures.files[0].cid, { recursive: true }, cb) + }) + }, + (cb) => { + ipfs.files.add(fixtures.files[1].data, { pin: false }, (err, res) => { + if (err) return cb(err) + ipfs.pin.add(fixtures.files[1].cid, { recursive: false }, cb) + }) + } + ], done) + } + }) + + after((done) => common.teardown(done)) + + it('should remove a recursive pin', (done) => { + ipfs.pin.rm(fixtures.files[0].cid, { recursive: true }, (err, pinset) => { + expect(err).to.not.exist() + expect(pinset).to.deep.equal([{ + hash: fixtures.files[0].cid + }]) + ipfs.pin.ls({ type: 
'recursive' }, (err, pinset) => { + expect(err).to.not.exist() + expect(pinset).to.not.deep.include({ + hash: fixtures.files[0].cid, + type: 'recursive' + }) + done() + }) + }) + }) + + it('should remove a direct pin (promised)', () => { + return ipfs.pin.rm(fixtures.files[1].cid, { recursive: false }) + .then((pinset) => { + expect(pinset).to.deep.equal([{ + hash: fixtures.files[1].cid + }]) + return ipfs.pin.ls({ type: 'direct' }) + }) + .then((pinset) => { + expect(pinset).to.not.deep.include({ + hash: fixtures.files[1].cid + }) + }) + }) + }) +} diff --git a/js/src/pin/utils.js b/js/src/pin/utils.js new file mode 100644 index 000000000..5f1add5ce --- /dev/null +++ b/js/src/pin/utils.js @@ -0,0 +1,13 @@ +'use strict' + +const loadFixture = require('aegir/fixtures') + +exports.fixtures = Object.freeze({ + files: Object.freeze([Object.freeze({ + data: loadFixture('js/test/fixtures/testfile.txt', 'interface-ipfs-core'), + cid: 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' + }), Object.freeze({ + data: loadFixture('js/test/fixtures/test-folder/files/hello.txt', 'interface-ipfs-core'), + cid: 'QmY9cxiHqTFoWamkQVkpmmqzBrY3hCBEL2XNu3NtX74Fuu' + })]) +}) From c60ceb2b837d9c538c717a2b028eb4ac15e043cc Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Fri, 8 Jun 2018 00:22:51 +0100 Subject: [PATCH 23/41] chore: cleanup requires License: MIT Signed-off-by: Alan Shaw --- js/src/block/get.js | 7 +- js/src/block/put.js | 7 +- js/src/block/stat.js | 7 +- js/src/bootstrap/add.js | 4 +- js/src/bootstrap/list.js | 4 +- js/src/bootstrap/rm.js | 4 +- js/src/config/get.js | 5 +- js/src/config/replace.js | 5 +- js/src/config/set.js | 5 +- js/src/dag/get.js | 7 +- js/src/dag/put.js | 7 +- js/src/dag/tree.js | 7 +- js/src/dht/findpeer.js | 5 +- js/src/dht/findprovs.js | 5 +- js/src/dht/get.js | 5 +- js/src/dht/provide.js | 5 +- js/src/dht/put.js | 3 +- js/src/dht/query.js | 5 +- js/src/files/add-pull-stream.js | 7 +- js/src/files/add-readable-stream.js | 7 +- js/src/files/add.js | 7 +- 
js/src/files/cat-pull-stream.js | 7 +- js/src/files/cat-readable-stream.js | 7 +- js/src/files/cat.js | 7 +- js/src/files/cp.js | 5 +- js/src/files/flush.js | 5 +- js/src/files/get-pull-stream.js | 7 +- js/src/files/get-readable-stream.js | 7 +- js/src/files/get.js | 7 +- js/src/files/ls.js | 5 +- js/src/files/mkdir.js | 5 +- js/src/files/mv.js | 5 +- js/src/files/read.js | 5 +- js/src/files/rm.js | 5 +- js/src/files/stat.js | 5 +- js/src/files/utils.js | 2 + js/src/files/write.js | 5 +- js/src/key/export.js | 7 +- js/src/key/gen.js | 7 +- js/src/key/import.js | 7 +- js/src/key/list.js | 7 +- js/src/key/rename.js | 7 +- js/src/key/rm.js | 7 +- js/src/ls/ls-pull-stream.js | 7 +- js/src/ls/ls-readable-stream.js | 7 +- js/src/ls/ls.js | 7 +- js/src/miscellaneous/dns.js | 5 +- js/src/miscellaneous/id.js | 5 +- js/src/miscellaneous/stop.js | 5 +- js/src/miscellaneous/version.js | 5 +- js/src/object/data.js | 2 - js/src/object/get.js | 2 - js/src/object/links.js | 2 - js/src/object/new.js | 2 - js/src/object/patch/add-link.js | 2 - js/src/object/patch/append-data.js | 2 - js/src/object/patch/rm-link.js | 2 - js/src/object/patch/set-data.js | 2 - js/src/object/put.js | 2 - js/src/object/stat.js | 2 - js/src/pin.js | 178 ---------------------------- js/src/ping/utils.js | 2 + js/src/repo/gc.js | 2 - js/src/repo/stat.js | 2 - js/src/repo/version.js | 2 - js/src/stats/bitswap.js | 2 - js/src/stats/bw-pull-stream.js | 2 - js/src/stats/bw-readable-stream.js | 2 - js/src/stats/bw.js | 2 - js/src/stats/repo.js | 2 - js/src/stats/utils.js | 3 - js/src/utils/mocha.js | 1 + js/src/utils/spawn.js | 2 + js/src/utils/suite.js | 2 + 74 files changed, 136 insertions(+), 378 deletions(-) delete mode 100644 js/src/pin.js diff --git a/js/src/block/get.js b/js/src/block/get.js index 944c50e3b..a077f614c 100644 --- a/js/src/block/get.js +++ b/js/src/block/get.js @@ -1,18 +1,17 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = 
require('chai') const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) const multihash = require('multihashes') const CID = require('cids') const Buffer = require('safe-buffer').Buffer const auto = require('async/auto') const { getDescribe, getIt } = require('../utils/mocha') +const expect = chai.expect +chai.use(dirtyChai) + module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) diff --git a/js/src/block/put.js b/js/src/block/put.js index c6464045c..ad4fbbe41 100644 --- a/js/src/block/put.js +++ b/js/src/block/put.js @@ -1,18 +1,17 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) const Block = require('ipfs-block') const multihash = require('multihashes') const CID = require('cids') const Buffer = require('safe-buffer').Buffer const { getDescribe, getIt } = require('../utils/mocha') +const expect = chai.expect +chai.use(dirtyChai) + module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) diff --git a/js/src/block/stat.js b/js/src/block/stat.js index 2137c194e..49f3e320a 100644 --- a/js/src/block/stat.js +++ b/js/src/block/stat.js @@ -1,16 +1,15 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) const CID = require('cids') const auto = require('async/auto') const { getDescribe, getIt } = require('../utils/mocha') +const expect = chai.expect +chai.use(dirtyChai) + module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) diff --git a/js/src/bootstrap/add.js b/js/src/bootstrap/add.js index e21a0bf44..a37f9efcd 100644 --- a/js/src/bootstrap/add.js +++ 
b/js/src/bootstrap/add.js @@ -1,12 +1,12 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ 'use strict' const chai = require('chai') const dirtyChai = require('dirty-chai') +const { getDescribe, getIt } = require('../utils/mocha') + const expect = chai.expect chai.use(dirtyChai) -const { getDescribe, getIt } = require('../utils/mocha') const invalidArg = 'this/Is/So/Invalid/' const validIp4 = '/ip4/104.236.176.52/tcp/4001/ipfs/QmSoLnSGccFuZQJzRadHn95W2CrSFmZuTdDWP8HXaHca9z' diff --git a/js/src/bootstrap/list.js b/js/src/bootstrap/list.js index 1c8ad0be0..7c0093b74 100644 --- a/js/src/bootstrap/list.js +++ b/js/src/bootstrap/list.js @@ -1,12 +1,12 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ 'use strict' const chai = require('chai') const dirtyChai = require('dirty-chai') +const { getDescribe, getIt } = require('../utils/mocha') + const expect = chai.expect chai.use(dirtyChai) -const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/bootstrap/rm.js b/js/src/bootstrap/rm.js index d83d60e1e..b7f2e0233 100644 --- a/js/src/bootstrap/rm.js +++ b/js/src/bootstrap/rm.js @@ -1,12 +1,12 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ 'use strict' const chai = require('chai') const dirtyChai = require('dirty-chai') +const { getDescribe, getIt } = require('../utils/mocha') + const expect = chai.expect chai.use(dirtyChai) -const { getDescribe, getIt } = require('../utils/mocha') const invalidArg = 'this/Is/So/Invalid/' diff --git a/js/src/config/get.js b/js/src/config/get.js index 2192b56d5..00e4b2637 100644 --- a/js/src/config/get.js +++ b/js/src/config/get.js @@ -1,13 +1,12 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') const dirtyChai = require('dirty-chai') +const { getDescribe, getIt } = require('../utils/mocha') + 
const expect = chai.expect chai.use(dirtyChai) -const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/config/replace.js b/js/src/config/replace.js index 978e9faeb..b40684218 100644 --- a/js/src/config/replace.js +++ b/js/src/config/replace.js @@ -1,13 +1,12 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') const dirtyChai = require('dirty-chai') +const { getDescribe, getIt } = require('../utils/mocha') + const expect = chai.expect chai.use(dirtyChai) -const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/config/set.js b/js/src/config/set.js index 796d0c244..1890bab77 100644 --- a/js/src/config/set.js +++ b/js/src/config/set.js @@ -1,13 +1,12 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') const dirtyChai = require('dirty-chai') +const { getDescribe, getIt } = require('../utils/mocha') + const expect = chai.expect chai.use(dirtyChai) -const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/dag/get.js b/js/src/dag/get.js index d92ad0ae8..a06c17dff 100644 --- a/js/src/dag/get.js +++ b/js/src/dag/get.js @@ -1,12 +1,8 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) const { series, eachSeries } = require('async') const dagPB = require('ipld-dag-pb') const DAGNode = dagPB.DAGNode @@ -14,6 +10,9 @@ const dagCBOR = require('ipld-dag-cbor') const { spawnNodeWithId } = require('../utils/spawn') const { getDescribe, getIt } = require('../utils/mocha') +const expect = 
chai.expect +chai.use(dirtyChai) + module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) diff --git a/js/src/dag/put.js b/js/src/dag/put.js index 9e48ea183..46605a7b8 100644 --- a/js/src/dag/put.js +++ b/js/src/dag/put.js @@ -1,12 +1,8 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) const dagPB = require('ipld-dag-pb') const DAGNode = dagPB.DAGNode const dagCBOR = require('ipld-dag-cbor') @@ -14,6 +10,9 @@ const CID = require('cids') const { spawnNodeWithId } = require('../utils/spawn') const { getDescribe, getIt } = require('../utils/mocha') +const expect = chai.expect +chai.use(dirtyChai) + module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) diff --git a/js/src/dag/tree.js b/js/src/dag/tree.js index ebe615869..78d85fc28 100644 --- a/js/src/dag/tree.js +++ b/js/src/dag/tree.js @@ -1,18 +1,17 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) const { series, eachSeries } = require('async') const dagPB = require('ipld-dag-pb') const dagCBOR = require('ipld-dag-cbor') const { spawnNodeWithId } = require('../utils/spawn') const { getDescribe, getIt } = require('../utils/mocha') +const expect = chai.expect +chai.use(dirtyChai) + module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) diff --git a/js/src/dht/findpeer.js b/js/src/dht/findpeer.js index 272e4ff24..962bfade5 100644 --- a/js/src/dht/findpeer.js +++ b/js/src/dht/findpeer.js @@ -3,11 +3,12 @@ const chai = require('chai') const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) const { spawnNodesWithId } 
= require('../utils/spawn') const { getDescribe, getIt } = require('../utils/mocha') +const expect = chai.expect +chai.use(dirtyChai) + module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) diff --git a/js/src/dht/findprovs.js b/js/src/dht/findprovs.js index 2c39eef8d..0205a88f3 100644 --- a/js/src/dht/findprovs.js +++ b/js/src/dht/findprovs.js @@ -3,13 +3,14 @@ const chai = require('chai') const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) const waterfall = require('async/waterfall') const CID = require('cids') const { spawnNodesWithId } = require('../utils/spawn') const { getDescribe, getIt } = require('../utils/mocha') +const expect = chai.expect +chai.use(dirtyChai) + module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) diff --git a/js/src/dht/get.js b/js/src/dht/get.js index 52aad028c..6cffd201f 100644 --- a/js/src/dht/get.js +++ b/js/src/dht/get.js @@ -3,12 +3,13 @@ const chai = require('chai') const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) const waterfall = require('async/waterfall') const { spawnNodesWithId } = require('../utils/spawn') const { getDescribe, getIt } = require('../utils/mocha') +const expect = chai.expect +chai.use(dirtyChai) + module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) diff --git a/js/src/dht/provide.js b/js/src/dht/provide.js index 158b68599..eb79b02b6 100644 --- a/js/src/dht/provide.js +++ b/js/src/dht/provide.js @@ -3,11 +3,12 @@ const chai = require('chai') const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) const CID = require('cids') const { getDescribe, getIt } = require('../utils/mocha') +const expect = chai.expect +chai.use(dirtyChai) + module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = 
getIt(options) diff --git a/js/src/dht/put.js b/js/src/dht/put.js index 8337db19d..c315d87fc 100644 --- a/js/src/dht/put.js +++ b/js/src/dht/put.js @@ -3,9 +3,10 @@ const chai = require('chai') const dirtyChai = require('dirty-chai') +const { getDescribe, getIt } = require('../utils/mocha') + const expect = chai.expect chai.use(dirtyChai) -const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/dht/query.js b/js/src/dht/query.js index 581475f33..411bef39c 100644 --- a/js/src/dht/query.js +++ b/js/src/dht/query.js @@ -3,11 +3,12 @@ const chai = require('chai') const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) const { spawnNodesWithId } = require('../utils/spawn') const { getDescribe, getIt } = require('../utils/mocha') +const expect = chai.expect +chai.use(dirtyChai) + module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) diff --git a/js/src/files/add-pull-stream.js b/js/src/files/add-pull-stream.js index b8a16cc65..1d6782b46 100644 --- a/js/src/files/add-pull-stream.js +++ b/js/src/files/add-pull-stream.js @@ -1,16 +1,15 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) const { fixtures } = require('./utils') const pull = require('pull-stream') const { getDescribe, getIt } = require('../utils/mocha') +const expect = chai.expect +chai.use(dirtyChai) + module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) diff --git a/js/src/files/add-readable-stream.js b/js/src/files/add-readable-stream.js index 60388a9e3..27d84a223 100644 --- a/js/src/files/add-readable-stream.js +++ b/js/src/files/add-readable-stream.js @@ -1,15 +1,14 @@ /* eslint-env mocha */ -/* eslint 
max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) const { fixtures } = require('./utils') const { getDescribe, getIt } = require('../utils/mocha') +const expect = chai.expect +chai.use(dirtyChai) + module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) diff --git a/js/src/files/add.js b/js/src/files/add.js index 72eca0708..8ffb20677 100644 --- a/js/src/files/add.js +++ b/js/src/files/add.js @@ -1,12 +1,8 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) const { fixtures } = require('./utils') const Readable = require('readable-stream').Readable const pull = require('pull-stream') @@ -14,6 +10,9 @@ const path = require('path') const expectTimeout = require('../utils/expect-timeout') const { getDescribe, getIt } = require('../utils/mocha') +const expect = chai.expect +chai.use(dirtyChai) + module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) diff --git a/js/src/files/cat-pull-stream.js b/js/src/files/cat-pull-stream.js index 0539d0b27..7d067dd23 100644 --- a/js/src/files/cat-pull-stream.js +++ b/js/src/files/cat-pull-stream.js @@ -1,16 +1,15 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) const { fixtures } = require('./utils') const pull = require('pull-stream') const { getDescribe, getIt } = require('../utils/mocha') +const expect = chai.expect +chai.use(dirtyChai) + module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) diff --git a/js/src/files/cat-readable-stream.js 
b/js/src/files/cat-readable-stream.js index c7d98d821..44a5fbbe4 100644 --- a/js/src/files/cat-readable-stream.js +++ b/js/src/files/cat-readable-stream.js @@ -1,16 +1,15 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) const { fixtures } = require('./utils') const bl = require('bl') const { getDescribe, getIt } = require('../utils/mocha') +const expect = chai.expect +chai.use(dirtyChai) + module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) diff --git a/js/src/files/cat.js b/js/src/files/cat.js index d7d1d7ab3..ab0da7041 100644 --- a/js/src/files/cat.js +++ b/js/src/files/cat.js @@ -1,18 +1,17 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) const { fixtures } = require('./utils') const bs58 = require('bs58') const parallel = require('async/parallel') const CID = require('cids') const { getDescribe, getIt } = require('../utils/mocha') +const expect = chai.expect +chai.use(dirtyChai) + module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) diff --git a/js/src/files/cp.js b/js/src/files/cp.js index 2e8d4e08a..d38533994 100644 --- a/js/src/files/cp.js +++ b/js/src/files/cp.js @@ -1,15 +1,14 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') const series = require('async/series') const hat = require('hat') const dirtyChai = require('dirty-chai') +const { getDescribe, getIt } = require('../utils/mocha') + const expect = chai.expect chai.use(dirtyChai) -const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = 
getDescribe(options) diff --git a/js/src/files/flush.js b/js/src/files/flush.js index eb9f4f20b..638ca6063 100644 --- a/js/src/files/flush.js +++ b/js/src/files/flush.js @@ -1,15 +1,14 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') const series = require('async/series') const hat = require('hat') const dirtyChai = require('dirty-chai') +const { getDescribe, getIt } = require('../utils/mocha') + const expect = chai.expect chai.use(dirtyChai) -const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/files/get-pull-stream.js b/js/src/files/get-pull-stream.js index d4ccacd40..b87b2591f 100644 --- a/js/src/files/get-pull-stream.js +++ b/js/src/files/get-pull-stream.js @@ -1,16 +1,15 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) const { fixtures } = require('./utils') const pull = require('pull-stream') const { getDescribe, getIt } = require('../utils/mocha') +const expect = chai.expect +chai.use(dirtyChai) + module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) diff --git a/js/src/files/get-readable-stream.js b/js/src/files/get-readable-stream.js index 92d17f0f2..55255cdd7 100644 --- a/js/src/files/get-readable-stream.js +++ b/js/src/files/get-readable-stream.js @@ -1,17 +1,16 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) const { fixtures } = require('./utils') const concat = require('concat-stream') const through = require('through2') const { getDescribe, getIt } = require('../utils/mocha') +const expect = chai.expect 
+chai.use(dirtyChai) + module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) diff --git a/js/src/files/get.js b/js/src/files/get.js index 285f0a874..0b0c439fc 100644 --- a/js/src/files/get.js +++ b/js/src/files/get.js @@ -1,18 +1,17 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) const { fixtures } = require('./utils') const bs58 = require('bs58') const parallel = require('async/parallel') const series = require('async/series') const { getDescribe, getIt } = require('../utils/mocha') +const expect = chai.expect +chai.use(dirtyChai) + module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) diff --git a/js/src/files/ls.js b/js/src/files/ls.js index a1ee6426e..dbfd50637 100644 --- a/js/src/files/ls.js +++ b/js/src/files/ls.js @@ -1,15 +1,14 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') const series = require('async/series') const hat = require('hat') const dirtyChai = require('dirty-chai') +const { getDescribe, getIt } = require('../utils/mocha') + const expect = chai.expect chai.use(dirtyChai) -const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/files/mkdir.js b/js/src/files/mkdir.js index b7fc25523..a9ce54f8e 100644 --- a/js/src/files/mkdir.js +++ b/js/src/files/mkdir.js @@ -1,14 +1,13 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') const hat = require('hat') const dirtyChai = require('dirty-chai') +const { getDescribe, getIt } = require('../utils/mocha') + const expect = chai.expect chai.use(dirtyChai) -const { getDescribe, getIt } = 
require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/files/mv.js b/js/src/files/mv.js index 3e3290341..b0727c281 100644 --- a/js/src/files/mv.js +++ b/js/src/files/mv.js @@ -1,15 +1,14 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') const series = require('async/series') const hat = require('hat') const dirtyChai = require('dirty-chai') +const { getDescribe, getIt } = require('../utils/mocha') + const expect = chai.expect chai.use(dirtyChai) -const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/files/read.js b/js/src/files/read.js index 2b09f85ab..82b48a6ce 100644 --- a/js/src/files/read.js +++ b/js/src/files/read.js @@ -1,15 +1,14 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') const series = require('async/series') const hat = require('hat') const dirtyChai = require('dirty-chai') +const { getDescribe, getIt } = require('../utils/mocha') + const expect = chai.expect chai.use(dirtyChai) -const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/files/rm.js b/js/src/files/rm.js index 0a5346f3c..5e002415f 100644 --- a/js/src/files/rm.js +++ b/js/src/files/rm.js @@ -1,15 +1,14 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') const series = require('async/series') const hat = require('hat') const dirtyChai = require('dirty-chai') +const { getDescribe, getIt } = require('../utils/mocha') + const expect = chai.expect chai.use(dirtyChai) -const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) 
diff --git a/js/src/files/stat.js b/js/src/files/stat.js index 28c5dde64..62aea64be 100644 --- a/js/src/files/stat.js +++ b/js/src/files/stat.js @@ -1,6 +1,4 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') @@ -8,9 +6,10 @@ const series = require('async/series') const hat = require('hat') const { fixtures } = require('./utils') const dirtyChai = require('dirty-chai') +const { getDescribe, getIt } = require('../utils/mocha') + const expect = chai.expect chai.use(dirtyChai) -const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/files/utils.js b/js/src/files/utils.js index 28bd24570..d6dcd1ece 100644 --- a/js/src/files/utils.js +++ b/js/src/files/utils.js @@ -1,3 +1,5 @@ +'use strict' + const loadFixture = require('aegir/fixtures') exports.fixtures = Object.freeze({ diff --git a/js/src/files/write.js b/js/src/files/write.js index 5739cf014..0e9bb65f2 100644 --- a/js/src/files/write.js +++ b/js/src/files/write.js @@ -1,14 +1,13 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') const hat = require('hat') const dirtyChai = require('dirty-chai') +const { getDescribe, getIt } = require('../utils/mocha') + const expect = chai.expect chai.use(dirtyChai) -const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/key/export.js b/js/src/key/export.js index a21d66666..2d43b4412 100644 --- a/js/src/key/export.js +++ b/js/src/key/export.js @@ -1,15 +1,14 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) const hat = require('hat') const { getDescribe, getIt } = 
require('../utils/mocha') +const expect = chai.expect +chai.use(dirtyChai) + module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) diff --git a/js/src/key/gen.js b/js/src/key/gen.js index 3da9961c1..a1648576b 100644 --- a/js/src/key/gen.js +++ b/js/src/key/gen.js @@ -1,15 +1,14 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) const hat = require('hat') const { getDescribe, getIt } = require('../utils/mocha') +const expect = chai.expect +chai.use(dirtyChai) + module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) diff --git a/js/src/key/import.js b/js/src/key/import.js index 863d4c84d..90ce43308 100644 --- a/js/src/key/import.js +++ b/js/src/key/import.js @@ -1,15 +1,14 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) const hat = require('hat') const { getDescribe, getIt } = require('../utils/mocha') +const expect = chai.expect +chai.use(dirtyChai) + module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) diff --git a/js/src/key/list.js b/js/src/key/list.js index 6948a0729..b1a92790b 100644 --- a/js/src/key/list.js +++ b/js/src/key/list.js @@ -1,16 +1,15 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') const dirtyChai = require('dirty-chai') const times = require('async/times') -const expect = chai.expect -chai.use(dirtyChai) const hat = require('hat') const { getDescribe, getIt } = require('../utils/mocha') +const expect = chai.expect +chai.use(dirtyChai) + module.exports = (createCommon, options) => { const describe = 
getDescribe(options) const it = getIt(options) diff --git a/js/src/key/rename.js b/js/src/key/rename.js index f352f1cea..f1f98c2d8 100644 --- a/js/src/key/rename.js +++ b/js/src/key/rename.js @@ -1,15 +1,14 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) const hat = require('hat') const { getDescribe, getIt } = require('../utils/mocha') +const expect = chai.expect +chai.use(dirtyChai) + module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) diff --git a/js/src/key/rm.js b/js/src/key/rm.js index 864065efc..92f87ce30 100644 --- a/js/src/key/rm.js +++ b/js/src/key/rm.js @@ -1,15 +1,14 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) const hat = require('hat') const { getDescribe, getIt } = require('../utils/mocha') +const expect = chai.expect +chai.use(dirtyChai) + module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) diff --git a/js/src/ls/ls-pull-stream.js b/js/src/ls/ls-pull-stream.js index 7a00e2d3d..e7b4602e1 100644 --- a/js/src/ls/ls-pull-stream.js +++ b/js/src/ls/ls-pull-stream.js @@ -1,16 +1,15 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) const { fixtures } = require('../files/utils') const pull = require('pull-stream') const { getDescribe, getIt } = require('../utils/mocha') +const expect = chai.expect +chai.use(dirtyChai) + module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) diff --git a/js/src/ls/ls-readable-stream.js 
b/js/src/ls/ls-readable-stream.js index ef75b0816..aa5c45f18 100644 --- a/js/src/ls/ls-readable-stream.js +++ b/js/src/ls/ls-readable-stream.js @@ -1,16 +1,15 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) const { fixtures } = require('../files/utils') const concat = require('concat-stream') const { getDescribe, getIt } = require('../utils/mocha') +const expect = chai.expect +chai.use(dirtyChai) + module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) diff --git a/js/src/ls/ls.js b/js/src/ls/ls.js index 3883b7bba..fbc3e6425 100644 --- a/js/src/ls/ls.js +++ b/js/src/ls/ls.js @@ -1,15 +1,14 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) const { fixtures } = require('../files/utils') const { getDescribe, getIt } = require('../utils/mocha') +const expect = chai.expect +chai.use(dirtyChai) + module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) diff --git a/js/src/miscellaneous/dns.js b/js/src/miscellaneous/dns.js index 1851a9d2c..19e4db528 100644 --- a/js/src/miscellaneous/dns.js +++ b/js/src/miscellaneous/dns.js @@ -1,13 +1,12 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') const dirtyChai = require('dirty-chai') +const { getDescribe, getIt } = require('../utils/mocha') + const expect = chai.expect chai.use(dirtyChai) -const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/miscellaneous/id.js b/js/src/miscellaneous/id.js index d60114407..d9da654fb 100644 --- 
a/js/src/miscellaneous/id.js +++ b/js/src/miscellaneous/id.js @@ -1,13 +1,12 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') const dirtyChai = require('dirty-chai') +const { getDescribe, getIt } = require('../utils/mocha') + const expect = chai.expect chai.use(dirtyChai) -const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/miscellaneous/stop.js b/js/src/miscellaneous/stop.js index a2982fbbe..c550f6bd8 100644 --- a/js/src/miscellaneous/stop.js +++ b/js/src/miscellaneous/stop.js @@ -1,13 +1,12 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') const dirtyChai = require('dirty-chai') +const { getDescribe, getIt } = require('../utils/mocha') + const expect = chai.expect chai.use(dirtyChai) -const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/miscellaneous/version.js b/js/src/miscellaneous/version.js index 3d7f06e7a..8c7320af7 100644 --- a/js/src/miscellaneous/version.js +++ b/js/src/miscellaneous/version.js @@ -1,13 +1,12 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') const dirtyChai = require('dirty-chai') +const { getDescribe, getIt } = require('../utils/mocha') + const expect = chai.expect chai.use(dirtyChai) -const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/object/data.js b/js/src/object/data.js index c1ea8e45b..d26bb06ae 100644 --- a/js/src/object/data.js +++ b/js/src/object/data.js @@ -1,6 +1,4 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') diff --git 
a/js/src/object/get.js b/js/src/object/get.js index c75f6d123..42ee4a459 100644 --- a/js/src/object/get.js +++ b/js/src/object/get.js @@ -1,6 +1,4 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') diff --git a/js/src/object/links.js b/js/src/object/links.js index beddb2abd..63d891e67 100644 --- a/js/src/object/links.js +++ b/js/src/object/links.js @@ -1,6 +1,4 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') diff --git a/js/src/object/new.js b/js/src/object/new.js index d05f5ac44..3f0019c30 100644 --- a/js/src/object/new.js +++ b/js/src/object/new.js @@ -1,6 +1,4 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') diff --git a/js/src/object/patch/add-link.js b/js/src/object/patch/add-link.js index 03cba11fa..202f7c3bc 100644 --- a/js/src/object/patch/add-link.js +++ b/js/src/object/patch/add-link.js @@ -1,6 +1,4 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') diff --git a/js/src/object/patch/append-data.js b/js/src/object/patch/append-data.js index 4ee678740..9cc3ee3f2 100644 --- a/js/src/object/patch/append-data.js +++ b/js/src/object/patch/append-data.js @@ -1,6 +1,4 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') diff --git a/js/src/object/patch/rm-link.js b/js/src/object/patch/rm-link.js index 827175817..ad1db0b5f 100644 --- a/js/src/object/patch/rm-link.js +++ b/js/src/object/patch/rm-link.js @@ -1,6 +1,4 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') diff --git a/js/src/object/patch/set-data.js b/js/src/object/patch/set-data.js index ef9e73c54..3d46302a3 100644 --- a/js/src/object/patch/set-data.js +++ b/js/src/object/patch/set-data.js @@ -1,6 
+1,4 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') diff --git a/js/src/object/put.js b/js/src/object/put.js index 4391425a0..3324b37e1 100644 --- a/js/src/object/put.js +++ b/js/src/object/put.js @@ -1,6 +1,4 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') diff --git a/js/src/object/stat.js b/js/src/object/stat.js index af266ac52..966bd8112 100644 --- a/js/src/object/stat.js +++ b/js/src/object/stat.js @@ -1,6 +1,4 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') diff --git a/js/src/pin.js b/js/src/pin.js deleted file mode 100644 index 1fc507ff5..000000000 --- a/js/src/pin.js +++ /dev/null @@ -1,178 +0,0 @@ -/* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - -'use strict' - -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) -const loadFixture = require('aegir/fixtures') - -const testFile = loadFixture('js/test/fixtures/testfile.txt', 'interface-ipfs-core') -const testHash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' - -module.exports = (common) => { - describe('.pin', function () { - this.timeout(50 * 1000) - - let ipfs - - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - populate() - }) - }) - - function populate () { - ipfs.files.add(testFile, (err, res) => { - expect(err).to.not.exist() - expect(res).to.have.length(1) - expect(res[0].hash).to.equal(testHash) - expect(res[0].path).to.equal(testHash) - done() - }) - } - }) - - after((done) => common.teardown(done)) - - describe('callback API', () => { - 
// 1st, because ipfs.files.add pins automatically - it('.ls type recursive', (done) => { - ipfs.pin.ls({ type: 'recursive' }, (err, pinset) => { - expect(err).to.not.exist() - expect(pinset).to.deep.include({ - hash: testHash, - type: 'recursive' - }) - done() - }) - }) - - it.skip('.ls type indirect', (done) => { - ipfs.pin.ls({ type: 'indirect' }, (err, pinset) => { - expect(err).to.not.exist() - // because the pinned file has no links - expect(pinset).to.be.empty() - done() - }) - }) - - it('.rm', (done) => { - ipfs.pin.rm(testHash, { recursive: true }, (err, pinset) => { - expect(err).to.not.exist() - expect(pinset).to.deep.equal([{ - hash: testHash - }]) - ipfs.pin.ls({ type: 'direct' }, (err, pinset) => { - expect(err).to.not.exist() - expect(pinset).to.not.deep.include({ - hash: testHash, - type: 'recursive' - }) - done() - }) - }) - }) - - it('.add', (done) => { - ipfs.pin.add(testHash, { recursive: false }, (err, pinset) => { - expect(err).to.not.exist() - expect(pinset).to.deep.equal([{ - hash: testHash - }]) - done() - }) - }) - - it('.ls', (done) => { - ipfs.pin.ls((err, pinset) => { - expect(err).to.not.exist() - expect(pinset).to.not.be.empty() - expect(pinset).to.deep.include({ - hash: testHash, - type: 'direct' - }) - done() - }) - }) - - it('.ls type direct', (done) => { - ipfs.pin.ls({ type: 'direct' }, (err, pinset) => { - expect(err).to.not.exist() - expect(pinset).to.deep.include({ - hash: testHash, - type: 'direct' - }) - done() - }) - }) - - it('.ls for a specific hash', (done) => { - ipfs.pin.ls(testHash, (err, pinset) => { - expect(err).to.not.exist() - expect(pinset).to.deep.equal([{ - hash: testHash, - type: 'direct' - }]) - done() - }) - }) - }) - - describe('promise API', () => { - it('.add', () => { - return ipfs.pin.add(testHash, { recursive: false }) - .then((pinset) => { - expect(pinset).to.deep.equal([{ - hash: testHash - }]) - }) - }) - - it('.ls', () => { - return ipfs.pin.ls() - .then((pinset) => { - 
expect(pinset).to.deep.include({ - hash: testHash, - type: 'direct' - }) - }) - }) - - it('.ls hash', () => { - return ipfs.pin.ls(testHash) - .then((pinset) => { - expect(pinset).to.deep.equal([{ - hash: testHash, - type: 'direct' - }]) - }) - }) - - it('.rm', () => { - return ipfs.pin.rm(testHash, { recursive: false }) - .then((pinset) => { - expect(pinset).to.deep.equal([{ - hash: testHash - }]) - return ipfs.pin.ls({ type: 'direct' }) - }) - .then((pinset) => { - expect(pinset).to.not.deep.include({ - hash: testHash - }) - }) - }) - }) - }) -} diff --git a/js/src/ping/utils.js b/js/src/ping/utils.js index beae05769..94fe90f58 100644 --- a/js/src/ping/utils.js +++ b/js/src/ping/utils.js @@ -1,3 +1,5 @@ +'use strict' + const chai = require('chai') const dirtyChai = require('dirty-chai') diff --git a/js/src/repo/gc.js b/js/src/repo/gc.js index 2fd1b74dd..4abab3944 100644 --- a/js/src/repo/gc.js +++ b/js/src/repo/gc.js @@ -1,6 +1,4 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') diff --git a/js/src/repo/stat.js b/js/src/repo/stat.js index b8bc85dda..0a0569f5b 100644 --- a/js/src/repo/stat.js +++ b/js/src/repo/stat.js @@ -1,6 +1,4 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') diff --git a/js/src/repo/version.js b/js/src/repo/version.js index aa6ebd4ee..9c04dcf9b 100644 --- a/js/src/repo/version.js +++ b/js/src/repo/version.js @@ -1,6 +1,4 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') diff --git a/js/src/stats/bitswap.js b/js/src/stats/bitswap.js index d00d8b00d..d1c8ea213 100644 --- a/js/src/stats/bitswap.js +++ b/js/src/stats/bitswap.js @@ -1,6 +1,4 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') diff --git a/js/src/stats/bw-pull-stream.js b/js/src/stats/bw-pull-stream.js index 
667337a31..ba810e4da 100644 --- a/js/src/stats/bw-pull-stream.js +++ b/js/src/stats/bw-pull-stream.js @@ -1,6 +1,4 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') diff --git a/js/src/stats/bw-readable-stream.js b/js/src/stats/bw-readable-stream.js index 7eb05d496..6f589dd2f 100644 --- a/js/src/stats/bw-readable-stream.js +++ b/js/src/stats/bw-readable-stream.js @@ -1,6 +1,4 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') diff --git a/js/src/stats/bw.js b/js/src/stats/bw.js index 6668e5d4f..66c726f26 100644 --- a/js/src/stats/bw.js +++ b/js/src/stats/bw.js @@ -1,6 +1,4 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') diff --git a/js/src/stats/repo.js b/js/src/stats/repo.js index c6dabbf68..80254bdd1 100644 --- a/js/src/stats/repo.js +++ b/js/src/stats/repo.js @@ -1,6 +1,4 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const chai = require('chai') diff --git a/js/src/stats/utils.js b/js/src/stats/utils.js index 3d1a584ab..8ada68103 100644 --- a/js/src/stats/utils.js +++ b/js/src/stats/utils.js @@ -1,6 +1,3 @@ -/* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - 'use strict' const { expect } = require('chai') diff --git a/js/src/utils/mocha.js b/js/src/utils/mocha.js index e4004c7a0..99f633cb0 100644 --- a/js/src/utils/mocha.js +++ b/js/src/utils/mocha.js @@ -1,4 +1,5 @@ /* eslint-env mocha */ +'use strict' // Get a "describe" function that is optionally 'skipped' or 'onlyed' // If skip/only are boolean true, then we want to skip/only the whole suite diff --git a/js/src/utils/spawn.js b/js/src/utils/spawn.js index ea985932f..792d482bc 100644 --- a/js/src/utils/spawn.js +++ b/js/src/utils/spawn.js @@ -1,3 +1,5 @@ +'use strict' + const waterfall = require('async/waterfall') const timesSeries = 
require('async/timesSeries') const map = require('async/map') diff --git a/js/src/utils/suite.js b/js/src/utils/suite.js index 4d8b1ca5f..2ea1707dd 100644 --- a/js/src/utils/suite.js +++ b/js/src/utils/suite.js @@ -1,3 +1,5 @@ +'use strict' + function createSuite (tests, parent) { const suite = (createCommon, options) => { Object.keys(tests).forEach(t => { From bb72445907f7a816ae6650ee0a0b1e0993e12cf2 Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Fri, 8 Jun 2018 11:16:28 +0100 Subject: [PATCH 24/41] feat: modularises pubsub License: MIT Signed-off-by: Alan Shaw --- js/src/pubsub.js | 678 ----------------------------------- js/src/pubsub/index.js | 12 + js/src/pubsub/ls.js | 94 +++++ js/src/pubsub/peers.js | 156 ++++++++ js/src/pubsub/publish.js | 64 ++++ js/src/pubsub/subscribe.js | 405 +++++++++++++++++++++ js/src/pubsub/unsubscribe.js | 67 ++++ js/src/pubsub/utils.js | 44 +++ 8 files changed, 842 insertions(+), 678 deletions(-) delete mode 100644 js/src/pubsub.js create mode 100644 js/src/pubsub/index.js create mode 100644 js/src/pubsub/ls.js create mode 100644 js/src/pubsub/peers.js create mode 100644 js/src/pubsub/publish.js create mode 100644 js/src/pubsub/subscribe.js create mode 100644 js/src/pubsub/unsubscribe.js create mode 100644 js/src/pubsub/utils.js diff --git a/js/src/pubsub.js b/js/src/pubsub.js deleted file mode 100644 index fc97c522e..000000000 --- a/js/src/pubsub.js +++ /dev/null @@ -1,678 +0,0 @@ -/* eslint-env mocha */ -/* eslint max-nested-callbacks: ['error', 8] */ -'use strict' - -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) -const series = require('async/series') -const each = require('async/each') -const parallel = require('async/parallel') -const whilst = require('async/whilst') -const hat = require('hat') -const { spawnNodesWithId } = require('./utils/spawn') - -// On Browsers it will be false, but the tests currently aren't run -// there anyway -let isWindows = 
process.platform && process.platform === 'win32' - -function waitForPeers (ipfs, topic, peersToWait, callback) { - const i = setInterval(() => { - ipfs.pubsub.peers(topic, (err, peers) => { - if (err) { - clearInterval(i) - return callback(err) - } - - const missingPeers = peersToWait - .map((e) => peers.indexOf(e) !== -1) - .filter((e) => !e) - - if (missingPeers.length === 0) { - clearInterval(i) - callback() - } - }) - }, 500) -} - -function makeCheck (n, done) { - let i = 0 - return (err) => { - if (err) { - return done(err) - } - - if (++i === n) { - done() - } - } -} - -module.exports = (common) => { - describe('.pubsub', function () { - this.timeout(80 * 1000) - - const getTopic = () => 'pubsub-tests-' + hat() - - let ipfs1 - let ipfs2 - let ipfs3 - let withGo - - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(100 * 1000) - - common.setup((err, factory) => { - if (err) { - return done(err) - } - - spawnNodesWithId(3, factory, (err, nodes) => { - if (err) { - return done(err) - } - - ipfs1 = nodes[0] - ipfs2 = nodes[1] - ipfs3 = nodes[2] - - ipfs1.id((err, id) => { - expect(err).to.not.exist() - withGo = id.agentVersion.startsWith('go-ipfs') - done() - }) - }) - }) - }) - - after((done) => { - common.teardown(done) - }) - - describe('single node', () => { - describe('.publish', () => { - it('errors on string messags', (done) => { - const topic = getTopic() - ipfs1.pubsub.publish(topic, 'hello friend', (err) => { - expect(err).to.exist() - done() - }) - }) - - it('message from buffer', (done) => { - const topic = getTopic() - ipfs1.pubsub.publish(topic, Buffer.from('hello friend'), done) - }) - }) - - describe('.subscribe', () => { - it('to one topic', (done) => { - const check = makeCheck(2, done) - const topic = getTopic() - - const handler = (msg) => { - expect(msg.data.toString()).to.equal('hi') - expect(msg).to.have.property('seqno') - 
expect(Buffer.isBuffer(msg.seqno)).to.eql(true) - expect(msg).to.have.property('topicIDs').eql([topic]) - expect(msg).to.have.property('from', ipfs1.peerId.id) - - ipfs1.pubsub.unsubscribe(topic, handler, (err) => { - expect(err).to.not.exist() - - ipfs1.pubsub.ls((err, topics) => { - expect(err).to.not.exist() - expect(topics).to.be.empty() - check() - }) - }) - } - - ipfs1.pubsub.subscribe(topic, handler, (err) => { - expect(err).to.not.exist() - ipfs1.pubsub.publish(topic, Buffer.from('hi'), check) - }) - }) - - it('to one topic with Promise', (done) => { - const check = makeCheck(2, done) - const topic = getTopic() - - const handler = (msg) => { - expect(msg.data.toString()).to.equal('hi') - expect(msg).to.have.property('seqno') - expect(Buffer.isBuffer(msg.seqno)).to.eql(true) - expect(msg).to.have.property('topicIDs').eql([topic]) - expect(msg).to.have.property('from', ipfs1.peerId.id) - - ipfs1.pubsub.unsubscribe(topic, handler, (err) => { - expect(err).to.not.exist() - - ipfs1.pubsub.ls((err, topics) => { - expect(err).to.not.exist() - expect(topics).to.be.empty() - check() - }) - }) - } - - ipfs1.pubsub - .subscribe(topic, handler) - .then(() => ipfs1.pubsub.publish(topic, Buffer.from('hi'), check)) - .catch((err) => expect(err).to.not.exist()) - }) - - it('to one topic with options and Promise', (done) => { - const check = makeCheck(2, done) - const topic = getTopic() - - const handler = (msg) => { - expect(msg.data.toString()).to.equal('hi') - expect(msg).to.have.property('seqno') - expect(Buffer.isBuffer(msg.seqno)).to.eql(true) - expect(msg).to.have.property('topicIDs').eql([topic]) - expect(msg).to.have.property('from', ipfs1.peerId.id) - - ipfs1.pubsub.unsubscribe(topic, handler, (err) => { - expect(err).to.not.exist() - - ipfs1.pubsub.ls((err, topics) => { - expect(err).to.not.exist() - expect(topics).to.be.empty() - check() - }) - }) - } - - ipfs1.pubsub - .subscribe(topic, handler, {}) - .then(() => ipfs1.pubsub.publish(topic, Buffer.from('hi'), 
check)) - .catch((err) => expect(err).to.not.exist()) - }) - - it('attaches multiple event listeners', (done) => { - const topic = getTopic() - - const check = makeCheck(3, done) - const handler1 = (msg) => { - expect(msg.data.toString()).to.eql('hello') - - series([ - (cb) => ipfs1.pubsub.unsubscribe(topic, handler1, cb), - (cb) => ipfs1.pubsub.ls(cb), - (cb) => ipfs1.pubsub.unsubscribe(topic, handler2, cb), - (cb) => ipfs1.pubsub.ls(cb) - ], (err, res) => { - expect(err).to.not.exist() - - // Still subscribed as there is one listener left - expect(res[1]).to.eql([topic]) - // Now all listeners are gone no subscription anymore - expect(res[3]).to.eql([]) - check() - }) - } - - const handler2 = (msg) => { - expect(msg.data.toString()).to.eql('hello') - check() - } - - parallel([ - (cb) => ipfs1.pubsub.subscribe(topic, handler1, cb), - (cb) => ipfs1.pubsub.subscribe(topic, handler2, cb) - ], (err) => { - expect(err).to.not.exist() - ipfs1.pubsub.publish(topic, Buffer.from('hello'), check) - }) - }) - - it('discover options', (done) => { - const check = makeCheck(2, done) - const topic = getTopic() - - const handler = (msg) => { - expect(msg.data.toString()).to.eql('hi') - ipfs1.pubsub.unsubscribe(topic, handler, check) - } - - ipfs1.pubsub.subscribe(topic, handler, { discover: true }, (err) => { - expect(err).to.not.exist() - ipfs1.pubsub.publish(topic, Buffer.from('hi'), check) - }) - }) - }) - }) - - describe('multiple nodes connected', () => { - before((done) => { - parallel([ - (cb) => ipfs1.swarm.connect(ipfs2.peerId.addresses.find((a) => a.includes('127.0.0.1')), cb), - (cb) => ipfs2.swarm.connect(ipfs3.peerId.addresses.find((a) => a.includes('127.0.0.1')), cb), - (cb) => ipfs1.swarm.connect(ipfs3.peerId.addresses.find((a) => a.includes('127.0.0.1')), cb) - ], (err) => { - if (err) { - return done(err) - } - // give some time to let everything connect - setTimeout(done, 300) - }) - }) - - describe('.peers', () => { - it('does not error when not subscribed to a 
topic', (done) => { - const topic = getTopic() - ipfs1.pubsub.peers(topic, (err, peers) => { - expect(err).to.not.exist() - // Should be empty() but as mentioned below go-ipfs returns more than it should - // expect(peers).to.be.empty() - - done() - }) - }) - - it("doesn't return extra peers", (done) => { - // Currently go-ipfs returns peers that have not been - // subscribed to the topic. Enable when go-ipfs has been fixed - const sub1 = (msg) => {} - const sub2 = (msg) => {} - const sub3 = (msg) => {} - - const topic = getTopic() - const topicOther = topic + 'different topic' - - series([ - (cb) => ipfs1.pubsub.subscribe(topic, sub1, cb), - (cb) => ipfs2.pubsub.subscribe(topicOther, sub2, cb), - (cb) => ipfs3.pubsub.subscribe(topicOther, sub3, cb) - ], (err) => { - expect(err).to.not.exist() - - ipfs1.pubsub.peers(topic, (err, peers) => { - expect(err).to.not.exist() - expect(peers).to.be.empty() - - parallel([ - (cb) => ipfs1.pubsub.unsubscribe(topic, sub1, cb), - (cb) => ipfs2.pubsub.unsubscribe(topicOther, sub2, cb), - (cb) => ipfs3.pubsub.unsubscribe(topicOther, sub3, cb) - ], done) - }) - }) - }) - - it('returns peers for a topic - one peer', (done) => { - // Currently go-ipfs returns peers that have not been - // subscribed to the topic. 
Enable when go-ipfs has been fixed - const sub1 = (msg) => {} - const sub2 = (msg) => {} - const sub3 = (msg) => {} - const topic = getTopic() - - series([ - (cb) => ipfs1.pubsub.subscribe(topic, sub1, cb), - (cb) => ipfs2.pubsub.subscribe(topic, sub2, cb), - (cb) => ipfs3.pubsub.subscribe(topic, sub3, cb), - (cb) => waitForPeers(ipfs1, topic, [ipfs2.peerId.id], cb) - ], (err) => { - expect(err).to.not.exist() - - parallel([ - (cb) => ipfs1.pubsub.unsubscribe(topic, sub1, cb), - (cb) => ipfs2.pubsub.unsubscribe(topic, sub2, cb), - (cb) => ipfs3.pubsub.unsubscribe(topic, sub3, cb) - ], done) - }) - }) - - it('lists peers for a topic - multiple peers', (done) => { - const sub1 = (msg) => {} - const sub2 = (msg) => {} - const sub3 = (msg) => {} - const topic = getTopic() - - series([ - (cb) => ipfs1.pubsub.subscribe(topic, sub1, cb), - (cb) => ipfs2.pubsub.subscribe(topic, sub2, cb), - (cb) => ipfs3.pubsub.subscribe(topic, sub3, cb), - (cb) => waitForPeers(ipfs1, topic, [ - ipfs2.peerId.id, - ipfs3.peerId.id - ], cb) - ], (err) => { - expect(err).to.not.exist() - - parallel([ - (cb) => ipfs1.pubsub.unsubscribe(topic, sub1, cb), - (cb) => ipfs2.pubsub.unsubscribe(topic, sub2, cb), - (cb) => ipfs3.pubsub.unsubscribe(topic, sub3, cb) - ], done) - }) - }) - }) - - describe('.ls', () => { - it('empty() list when no topics are subscribed', (done) => { - ipfs1.pubsub.ls((err, topics) => { - expect(err).to.not.exist() - expect(topics.length).to.equal(0) - done() - }) - }) - - it('list with 1 subscribed topic', (done) => { - const sub1 = (msg) => {} - const topic = getTopic() - - ipfs1.pubsub.subscribe(topic, sub1, (err) => { - expect(err).to.not.exist() - - ipfs1.pubsub.ls((err, topics) => { - expect(err).to.not.exist() - expect(topics).to.be.eql([topic]) - - ipfs1.pubsub.unsubscribe(topic, sub1, done) - }) - }) - }) - - it('list with 3 subscribed topics', (done) => { - const topics = [{ - name: 'one', - handler () {} - }, { - name: 'two', - handler () {} - }, { - name: 
'three', - handler () {} - }] - - each(topics, (t, cb) => { - ipfs1.pubsub.subscribe(t.name, t.handler, cb) - }, (err) => { - expect(err).to.not.exist() - ipfs1.pubsub.ls((err, list) => { - expect(err).to.not.exist() - - expect(list.sort()) - .to.eql(topics.map((t) => t.name).sort()) - - parallel(topics.map((t) => { - return (cb) => ipfs1.pubsub.unsubscribe(t.name, t.handler, cb) - }), done) - }) - }) - }) - }) - - describe('multiple nodes', () => { - let topic - let sub1 - let sub2 - - beforeEach(() => { - topic = getTopic() - }) - - afterEach((done) => { - parallel([ - (cb) => ipfs1.pubsub.unsubscribe(topic, sub1, cb), - (cb) => ipfs2.pubsub.unsubscribe(topic, sub2, cb) - ], done) - }) - - it('receive messages from different node', (done) => { - const check = makeCheck(3, done) - const expectedString = 'hello from the other side' - - sub1 = (msg) => { - expect(msg.data.toString()).to.be.eql(expectedString) - expect(msg.from).to.eql(ipfs2.peerId.id) - check() - } - - sub2 = (msg) => { - expect(msg.data.toString()).to.be.eql(expectedString) - expect(msg.from).to.eql(ipfs2.peerId.id) - check() - } - - series([ - (cb) => ipfs1.pubsub.subscribe(topic, sub1, cb), - (cb) => ipfs2.pubsub.subscribe(topic, sub2, cb), - (cb) => waitForPeers(ipfs2, topic, [ipfs1.peerId.id], cb) - ], (err) => { - expect(err).to.not.exist() - - ipfs2.pubsub.publish(topic, Buffer.from(expectedString), check) - }) - }) - - it('round trips a non-utf8 binary buffer correctly', (done) => { - const check = makeCheck(3, done) - const expectedHex = 'a36161636179656162830103056164a16466666666f4' - const buffer = Buffer.from(expectedHex, 'hex') - - sub1 = (msg) => { - try { - expect(msg.data.toString('hex')).to.be.eql(expectedHex) - expect(msg.from).to.eql(ipfs2.peerId.id) - check() - } catch (err) { - check(err) - } - } - - sub2 = (msg) => { - try { - expect(msg.data.toString('hex')).to.eql(expectedHex) - expect(msg.from).to.eql(ipfs2.peerId.id) - check() - } catch (err) { - check(err) - } - } - - 
series([ - (cb) => ipfs1.pubsub.subscribe(topic, sub1, cb), - (cb) => ipfs2.pubsub.subscribe(topic, sub2, cb), - (cb) => waitForPeers(ipfs2, topic, [ipfs1.peerId.id], cb) - ], (err) => { - expect(err).to.not.exist() - - ipfs2.pubsub.publish(topic, buffer, check) - }) - }) - - it('receive multiple messages', function (done) { - // TODO fix https://github.com/ipfs/interface-ipfs-core/pull/188#issuecomment-354673246 - // and https://github.com/ipfs/go-ipfs/issues/4778 - if (withGo && isWindows) { - this.skip() - } - - const inbox1 = [] - const inbox2 = [] - const outbox = ['hello', 'world', 'this', 'is', 'pubsub'] - - const check = makeCheck(outbox.length * 3, (err) => { - expect(inbox1.sort()).to.eql(outbox.sort()) - expect(inbox2.sort()).to.eql(outbox.sort()) - - done(err) - }) - - sub1 = (msg) => { - inbox1.push(msg.data.toString()) - expect(msg.from).to.eql(ipfs2.peerId.id) - check() - } - - sub2 = (msg) => { - inbox2.push(msg.data.toString()) - expect(msg.from).to.be.eql(ipfs2.peerId.id) - check() - } - - series([ - (cb) => ipfs1.pubsub.subscribe(topic, sub1, cb), - (cb) => ipfs2.pubsub.subscribe(topic, sub2, cb), - (cb) => waitForPeers(ipfs2, topic, [ipfs1.peerId.id], cb) - ], (err) => { - expect(err).to.not.exist() - - outbox.forEach((msg) => { - ipfs2.pubsub.publish(topic, Buffer.from(msg), check) - }) - }) - }) - }) - - describe('light-load tests', function () { - before(() => { - ipfs1.pubsub.setMaxListeners(10 * 10) - ipfs2.pubsub.setMaxListeners(10 * 10) - }) - - after(() => { - ipfs1.pubsub.setMaxListeners(10) - ipfs2.pubsub.setMaxListeners(10) - }) - - it('call publish 10 times', (done) => { - const count = 10 - let sendCount = 0 - const topic = getTopic() - - whilst( - () => sendCount < count, - (cb) => { - sendCount++ - ipfs1.pubsub.publish(topic, Buffer.from('hey there'), cb) - }, - done - ) - }) - - describe('send/receive', () => { - let topic - let sub1 - let sub2 - - beforeEach(function () { - // TODO fix 
https://github.com/ipfs/interface-ipfs-core/pull/188#issuecomment-354673246 - // and https://github.com/ipfs/go-ipfs/issues/4778 - if (withGo && isWindows) { - this.skip() - } - - topic = getTopic() - }) - - afterEach((done) => { - parallel([ - (cb) => ipfs1.pubsub.unsubscribe(topic, sub1, cb), - (cb) => ipfs2.pubsub.unsubscribe(topic, sub2, cb) - ], done) - }) - - it('send/receive 100 messages', function (done) { - this.timeout(2 * 60 * 1000) - - const msgBase = 'msg - ' - const count = 100 - let sendCount = 0 - let receivedCount = 0 - let startTime - let counter = 0 - - sub1 = (msg) => { - // go-ipfs can't send messages in order when there are - // only two nodes in the same machine ¯\_(ツ)_/¯ - // https://github.com/ipfs/js-ipfs-api/pull/493#issuecomment-289499943 - // const expectedMsg = msgBase + receivedCount - // const receivedMsg = msg.data.toString() - // expect(receivedMsg).to.eql(expectedMsg) - - receivedCount++ - - if (receivedCount >= count) { - const duration = new Date().getTime() - startTime - const opsPerSec = Math.floor(count / (duration / 1000)) - - console.log(`Send/Receive 100 messages took: ${duration} ms, ${opsPerSec} ops / s\n`) - - check() - } - } - - sub2 = (msg) => {} - - function check () { - if (++counter === 2) { - done() - } - } - - series([ - (cb) => ipfs1.pubsub.subscribe(topic, sub1, cb), - (cb) => ipfs2.pubsub.subscribe(topic, sub2, cb), - (cb) => waitForPeers(ipfs1, topic, [ipfs2.peerId.id], cb) - ], (err) => { - expect(err).to.not.exist() - startTime = new Date().getTime() - - whilst( - () => sendCount < count, - (cb) => { - const msgData = Buffer.from(msgBase + sendCount) - sendCount++ - ipfs2.pubsub.publish(topic, msgData, cb) - }, - check - ) - }) - }) - }) - - it('call subscribe/unsubscribe 10 times', (done) => { - const count = 10 - let sendCount = 0 - const handlers = [] - - const someTopic = getTopic() - - whilst( - () => sendCount < count, - (cb) => { - sendCount++ - const handler = (msg) => {} - handlers.push(handler) - 
ipfs1.pubsub.subscribe(someTopic, handler, cb) - }, - (err) => { - expect(err).to.not.exist() - each( - handlers, - (handler, cb) => ipfs1.pubsub.unsubscribe(someTopic, handler, cb), - (err) => { - expect(err).to.not.exist() - ipfs1.pubsub.ls((err, topics) => { - expect(err).to.not.exist() - expect(topics).to.eql([]) - done() - }) - } - ) - } - ) - }) - }) - }) - }) -} diff --git a/js/src/pubsub/index.js b/js/src/pubsub/index.js new file mode 100644 index 000000000..da5469773 --- /dev/null +++ b/js/src/pubsub/index.js @@ -0,0 +1,12 @@ +'use strict' +const { createSuite } = require('../utils/suite') + +const tests = { + publish: require('./publish'), + subscribe: require('./subscribe'), + unsubscribe: require('./unsubscribe'), + peers: require('./peers'), + ls: require('./ls') +} + +module.exports = createSuite(tests) diff --git a/js/src/pubsub/ls.js b/js/src/pubsub/ls.js new file mode 100644 index 000000000..7659d9bbf --- /dev/null +++ b/js/src/pubsub/ls.js @@ -0,0 +1,94 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const each = require('async/each') +const { getTopic } = require('./utils') +const { getDescribe, getIt } = require('../utils/mocha') + +const expect = chai.expect +chai.use(dirtyChai) + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.pubsub.ls', function () { + this.timeout(80 * 1000) + + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should return an empty list when no topics are subscribed', (done) => { + ipfs.pubsub.ls((err, 
topics) => { + expect(err).to.not.exist() + expect(topics.length).to.equal(0) + done() + }) + }) + + it('should return a list with 1 subscribed topic', (done) => { + const sub1 = (msg) => {} + const topic = getTopic() + + ipfs.pubsub.subscribe(topic, sub1, (err) => { + expect(err).to.not.exist() + + ipfs.pubsub.ls((err, topics) => { + expect(err).to.not.exist() + expect(topics).to.be.eql([topic]) + + ipfs.pubsub.unsubscribe(topic, sub1, done) + }) + }) + }) + + it('should return a list with 3 subscribed topics', (done) => { + const topics = [{ + name: 'one', + handler () {} + }, { + name: 'two', + handler () {} + }, { + name: 'three', + handler () {} + }] + + each(topics, (t, cb) => { + ipfs.pubsub.subscribe(t.name, t.handler, cb) + }, (err) => { + expect(err).to.not.exist() + + ipfs.pubsub.ls((err, list) => { + expect(err).to.not.exist() + + expect(list.sort()) + .to.eql(topics.map((t) => t.name).sort()) + + each(topics, (t, cb) => { + ipfs.pubsub.unsubscribe(t.name, t.handler, cb) + }, done) + }) + }) + }) + }) +} diff --git a/js/src/pubsub/peers.js b/js/src/pubsub/peers.js new file mode 100644 index 000000000..5f72f18a8 --- /dev/null +++ b/js/src/pubsub/peers.js @@ -0,0 +1,156 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const parallel = require('async/parallel') +const auto = require('async/auto') +const { spawnNodesWithId } = require('../utils/spawn') +const { waitForPeers, getTopic } = require('./utils') +const { getDescribe, getIt } = require('../utils/mocha') + +const expect = chai.expect +chai.use(dirtyChai) + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.pubsub.peers', function () { + this.timeout(80 * 1000) + + let ipfs1 + let ipfs2 + let ipfs3 + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the 
before step + this.timeout(100 * 1000) + + common.setup((err, factory) => { + if (err) return done(err) + + spawnNodesWithId(3, factory, (err, nodes) => { + if (err) return done(err) + + ipfs1 = nodes[0] + ipfs2 = nodes[1] + ipfs3 = nodes[2] + + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + before((done) => { + const ipfs2Addr = ipfs2.peerId.addresses.find((a) => a.includes('127.0.0.1')) + const ipfs3Addr = ipfs3.peerId.addresses.find((a) => a.includes('127.0.0.1')) + + parallel([ + (cb) => ipfs1.swarm.connect(ipfs2Addr, cb), + (cb) => ipfs1.swarm.connect(ipfs3Addr, cb), + (cb) => ipfs2.swarm.connect(ipfs3Addr, cb) + ], done) + }) + + describe('.peers', () => { + it('should not error when not subscribed to a topic', (done) => { + const topic = getTopic() + ipfs1.pubsub.peers(topic, (err, peers) => { + expect(err).to.not.exist() + // Should be empty() but as mentioned below go-ipfs returns more than it should + // expect(peers).to.be.empty() + + done() + }) + }) + + it('should not return extra peers', (done) => { + // Currently go-ipfs returns peers that have not been + // subscribed to the topic. 
Enable when go-ipfs has been fixed + const sub1 = (msg) => {} + const sub2 = (msg) => {} + const sub3 = (msg) => {} + + const topic = getTopic() + const topicOther = topic + 'different topic' + + parallel([ + (cb) => ipfs1.pubsub.subscribe(topic, sub1, cb), + (cb) => ipfs2.pubsub.subscribe(topicOther, sub2, cb), + (cb) => ipfs3.pubsub.subscribe(topicOther, sub3, cb) + ], (err) => { + expect(err).to.not.exist() + + ipfs1.pubsub.peers(topic, (err, peers) => { + expect(err).to.not.exist() + expect(peers).to.be.empty() + + parallel([ + (cb) => ipfs1.pubsub.unsubscribe(topic, sub1, cb), + (cb) => ipfs2.pubsub.unsubscribe(topicOther, sub2, cb), + (cb) => ipfs3.pubsub.unsubscribe(topicOther, sub3, cb) + ], done) + }) + }) + }) + + it('should return peers for a topic - one peer', (done) => { + // Currently go-ipfs returns peers that have not been + // subscribed to the topic. Enable when go-ipfs has been fixed + const sub1 = (msg) => {} + const sub2 = (msg) => {} + const sub3 = (msg) => {} + const topic = getTopic() + + auto({ + sub1: (cb) => ipfs1.pubsub.subscribe(topic, sub1, cb), + sub2: (cb) => ipfs2.pubsub.subscribe(topic, sub2, cb), + sub3: (cb) => ipfs3.pubsub.subscribe(topic, sub3, cb), + peers: ['sub1', 'sub2', 'sub3', (_, cb) => { + waitForPeers(ipfs1, topic, [ipfs2.peerId.id], cb) + }] + }, (err) => { + expect(err).to.not.exist() + + parallel([ + (cb) => ipfs1.pubsub.unsubscribe(topic, sub1, cb), + (cb) => ipfs2.pubsub.unsubscribe(topic, sub2, cb), + (cb) => ipfs3.pubsub.unsubscribe(topic, sub3, cb) + ], done) + }) + }) + + it('should return peers for a topic - multiple peers', (done) => { + const sub1 = (msg) => {} + const sub2 = (msg) => {} + const sub3 = (msg) => {} + const topic = getTopic() + + auto({ + sub1: (cb) => ipfs1.pubsub.subscribe(topic, sub1, cb), + sub2: (cb) => ipfs2.pubsub.subscribe(topic, sub2, cb), + sub3: (cb) => ipfs3.pubsub.subscribe(topic, sub3, cb), + peers: ['sub1', 'sub2', 'sub3', (_, cb) => { + waitForPeers(ipfs1, topic, [ + 
ipfs2.peerId.id, + ipfs3.peerId.id + ], cb) + }] + }, (err) => { + expect(err).to.not.exist() + + parallel([ + (cb) => ipfs1.pubsub.unsubscribe(topic, sub1, cb), + (cb) => ipfs2.pubsub.unsubscribe(topic, sub2, cb), + (cb) => ipfs3.pubsub.unsubscribe(topic, sub3, cb) + ], done) + }) + }) + }) + }) +} diff --git a/js/src/pubsub/publish.js b/js/src/pubsub/publish.js new file mode 100644 index 000000000..0394a01cf --- /dev/null +++ b/js/src/pubsub/publish.js @@ -0,0 +1,64 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ['error', 8] */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const times = require('async/times') +const hat = require('hat') +const { getTopic } = require('./utils') +const { getDescribe, getIt } = require('../utils/mocha') + +const expect = chai.expect +chai.use(dirtyChai) + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.pubsub.publish', function () { + this.timeout(80 * 1000) + + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should error on string messags', (done) => { + const topic = getTopic() + ipfs.pubsub.publish(topic, 'hello friend', (err) => { + expect(err).to.exist() + done() + }) + }) + + it('should publish message from buffer', (done) => { + const topic = getTopic() + ipfs.pubsub.publish(topic, Buffer.from(hat()), done) + }) + + it('should publish 10 times within time limit', (done) => { + const count = 10 + const topic = getTopic() + + times(count, (_, cb) => { + ipfs.pubsub.publish(topic, Buffer.from(hat()), cb) + }, done) + }) + }) 
+} diff --git a/js/src/pubsub/subscribe.js b/js/src/pubsub/subscribe.js new file mode 100644 index 000000000..30e984f2b --- /dev/null +++ b/js/src/pubsub/subscribe.js @@ -0,0 +1,405 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const series = require('async/series') +const parallel = require('async/parallel') +const times = require('async/times') +const auto = require('async/auto') +const { spawnNodesWithId } = require('../utils/spawn') +const { waitForPeers, makeCheck, getTopic } = require('./utils') +const { getDescribe, getIt } = require('../utils/mocha') + +const expect = chai.expect +chai.use(dirtyChai) + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.pubsub.subscribe', function () { + this.timeout(80 * 1000) + + let ipfs1 + let ipfs2 + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(100 * 1000) + + common.setup((err, factory) => { + if (err) return done(err) + + spawnNodesWithId(2, factory, (err, nodes) => { + if (err) return done(err) + + ipfs1 = nodes[0] + ipfs2 = nodes[1] + + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + describe('single node', () => { + it('should subscribe to one topic', (done) => { + const check = makeCheck(2, done) + const topic = getTopic() + + const handler = (msg) => { + expect(msg.data.toString()).to.equal('hi') + expect(msg).to.have.property('seqno') + expect(Buffer.isBuffer(msg.seqno)).to.eql(true) + expect(msg).to.have.property('topicIDs').eql([topic]) + expect(msg).to.have.property('from', ipfs1.peerId.id) + + ipfs1.pubsub.unsubscribe(topic, handler, (err) => { + expect(err).to.not.exist() + + ipfs1.pubsub.ls((err, topics) => { + expect(err).to.not.exist() + expect(topics).to.be.empty() + check() + }) + }) + } + + 
ipfs1.pubsub.subscribe(topic, handler, (err) => { + expect(err).to.not.exist() + ipfs1.pubsub.publish(topic, Buffer.from('hi'), check) + }) + }) + + it('should subscribe to one topic (promised)', (done) => { + const check = makeCheck(2, done) + const topic = getTopic() + + const handler = (msg) => { + expect(msg.data.toString()).to.equal('hi') + expect(msg).to.have.property('seqno') + expect(Buffer.isBuffer(msg.seqno)).to.eql(true) + expect(msg).to.have.property('topicIDs').eql([topic]) + expect(msg).to.have.property('from', ipfs1.peerId.id) + + ipfs1.pubsub.unsubscribe(topic, handler, (err) => { + expect(err).to.not.exist() + + ipfs1.pubsub.ls((err, topics) => { + expect(err).to.not.exist() + expect(topics).to.be.empty() + check() + }) + }) + } + + ipfs1.pubsub + .subscribe(topic, handler) + .then(() => ipfs1.pubsub.publish(topic, Buffer.from('hi'), check)) + .catch((err) => expect(err).to.not.exist()) + }) + + it('should subscribe to one topic with options', (done) => { + const check = makeCheck(2, done) + const topic = getTopic() + + const handler = (msg) => { + expect(msg.data.toString()).to.equal('hi') + expect(msg).to.have.property('seqno') + expect(Buffer.isBuffer(msg.seqno)).to.eql(true) + expect(msg).to.have.property('topicIDs').eql([topic]) + expect(msg).to.have.property('from', ipfs1.peerId.id) + + ipfs1.pubsub.unsubscribe(topic, handler, (err) => { + expect(err).to.not.exist() + + ipfs1.pubsub.ls((err, topics) => { + expect(err).to.not.exist() + expect(topics).to.be.empty() + check() + }) + }) + } + + ipfs1.pubsub.subscribe(topic, handler, {}, (err) => { + expect(err).to.not.exist() + ipfs1.pubsub.publish(topic, Buffer.from('hi'), check) + }) + }) + + it('should subscribe to one topic with options (promised)', (done) => { + const check = makeCheck(2, done) + const topic = getTopic() + + const handler = (msg) => { + expect(msg.data.toString()).to.equal('hi') + expect(msg).to.have.property('seqno') + expect(Buffer.isBuffer(msg.seqno)).to.eql(true) + 
expect(msg).to.have.property('topicIDs').eql([topic]) + expect(msg).to.have.property('from', ipfs1.peerId.id) + + ipfs1.pubsub.unsubscribe(topic, handler, (err) => { + expect(err).to.not.exist() + + ipfs1.pubsub.ls((err, topics) => { + expect(err).to.not.exist() + expect(topics).to.be.empty() + check() + }) + }) + } + + ipfs1.pubsub + .subscribe(topic, handler, {}) + .then(() => ipfs1.pubsub.publish(topic, Buffer.from('hi'), check)) + .catch((err) => expect(err).to.not.exist()) + }) + + it('should subscribe to topic multiple times with different handlers', (done) => { + const topic = getTopic() + + const check = makeCheck(3, done) + const handler1 = (msg) => { + expect(msg.data.toString()).to.eql('hello') + + series([ + (cb) => ipfs1.pubsub.unsubscribe(topic, handler1, cb), + (cb) => ipfs1.pubsub.ls(cb), + (cb) => ipfs1.pubsub.unsubscribe(topic, handler2, cb), + (cb) => ipfs1.pubsub.ls(cb) + ], (err, res) => { + expect(err).to.not.exist() + + // Still subscribed as there is one listener left + expect(res[1]).to.eql([topic]) + // Now all listeners are gone no subscription anymore + expect(res[3]).to.eql([]) + check() + }) + } + + const handler2 = (msg) => { + expect(msg.data.toString()).to.eql('hello') + check() + } + + parallel([ + (cb) => ipfs1.pubsub.subscribe(topic, handler1, cb), + (cb) => ipfs1.pubsub.subscribe(topic, handler2, cb) + ], (err) => { + expect(err).to.not.exist() + ipfs1.pubsub.publish(topic, Buffer.from('hello'), check) + }) + }) + + it('should allow discover option to be passed', (done) => { + const check = makeCheck(2, done) + const topic = getTopic() + + const handler = (msg) => { + expect(msg.data.toString()).to.eql('hi') + ipfs1.pubsub.unsubscribe(topic, handler, check) + } + + ipfs1.pubsub.subscribe(topic, handler, { discover: true }, (err) => { + expect(err).to.not.exist() + ipfs1.pubsub.publish(topic, Buffer.from('hi'), check) + }) + }) + }) + + describe('multiple connected nodes', () => { + before((done) => { + if 
(ipfs1.pubsub.setMaxListeners) { + ipfs1.pubsub.setMaxListeners(100) + } + + if (ipfs2.pubsub.setMaxListeners) { + ipfs2.pubsub.setMaxListeners(100) + } + + const ipfs2Addr = ipfs2.peerId.addresses.find((a) => a.includes('127.0.0.1')) + ipfs1.swarm.connect(ipfs2Addr, done) + }) + + let topic + let sub1 + let sub2 + + beforeEach(() => { + topic = getTopic() + }) + + afterEach((done) => { + parallel([ + (cb) => ipfs1.pubsub.unsubscribe(topic, sub1, cb), + (cb) => ipfs2.pubsub.unsubscribe(topic, sub2, cb) + ], done) + }) + + it('should receive messages from a different node', (done) => { + const check = makeCheck(3, done) + const expectedString = 'hello from the other side' + + sub1 = (msg) => { + expect(msg.data.toString()).to.be.eql(expectedString) + expect(msg.from).to.eql(ipfs2.peerId.id) + check() + } + + sub2 = (msg) => { + expect(msg.data.toString()).to.be.eql(expectedString) + expect(msg.from).to.eql(ipfs2.peerId.id) + check() + } + + auto({ + sub1: (cb) => ipfs1.pubsub.subscribe(topic, sub1, cb), + sub2: (cb) => ipfs2.pubsub.subscribe(topic, sub2, cb), + peers: ['sub1', 'sub2', (_, cb) => { + waitForPeers(ipfs2, topic, [ipfs1.peerId.id], cb) + }] + }, (err) => { + expect(err).to.not.exist() + + ipfs2.pubsub.publish(topic, Buffer.from(expectedString), check) + }) + }) + + it('should round trip a non-utf8 binary buffer', (done) => { + const check = makeCheck(3, done) + const expectedHex = 'a36161636179656162830103056164a16466666666f4' + const buffer = Buffer.from(expectedHex, 'hex') + + sub1 = (msg) => { + try { + expect(msg.data.toString('hex')).to.be.eql(expectedHex) + expect(msg.from).to.eql(ipfs2.peerId.id) + check() + } catch (err) { + check(err) + } + } + + sub2 = (msg) => { + try { + expect(msg.data.toString('hex')).to.eql(expectedHex) + expect(msg.from).to.eql(ipfs2.peerId.id) + check() + } catch (err) { + check(err) + } + } + + auto({ + sub1: (cb) => ipfs1.pubsub.subscribe(topic, sub1, cb), + sub2: (cb) => ipfs2.pubsub.subscribe(topic, sub2, cb), + 
peers: ['sub1', 'sub2', (_, cb) => { + waitForPeers(ipfs2, topic, [ipfs1.peerId.id], cb) + }] + }, (err) => { + expect(err).to.not.exist() + + ipfs2.pubsub.publish(topic, buffer, check) + }) + }) + + it('should receive multiple messages', function (done) { + const inbox1 = [] + const inbox2 = [] + const outbox = ['hello', 'world', 'this', 'is', 'pubsub'] + + const check = makeCheck(outbox.length * 3, (err) => { + expect(inbox1.sort()).to.eql(outbox.sort()) + expect(inbox2.sort()).to.eql(outbox.sort()) + + done(err) + }) + + sub1 = (msg) => { + inbox1.push(msg.data.toString()) + expect(msg.from).to.eql(ipfs2.peerId.id) + check() + } + + sub2 = (msg) => { + inbox2.push(msg.data.toString()) + expect(msg.from).to.be.eql(ipfs2.peerId.id) + check() + } + + auto({ + sub1: (cb) => ipfs1.pubsub.subscribe(topic, sub1, cb), + sub2: (cb) => ipfs2.pubsub.subscribe(topic, sub2, cb), + peers: ['sub1', 'sub2', (_, cb) => { + waitForPeers(ipfs2, topic, [ipfs1.peerId.id], cb) + }] + }, (err) => { + expect(err).to.not.exist() + + outbox.forEach((msg) => { + ipfs2.pubsub.publish(topic, Buffer.from(msg), check) + }) + }) + }) + + it('send/receive 100 messages', function (done) { + this.timeout(2 * 60 * 1000) + + const msgBase = 'msg - ' + const count = 100 + let receivedCount = 0 + let startTime + let counter = 0 + + sub1 = (msg) => { + // go-ipfs can't send messages in order when there are + // only two nodes in the same machine ¯\_(ツ)_/¯ + // https://github.com/ipfs/js-ipfs-api/pull/493#issuecomment-289499943 + // const expectedMsg = msgBase + receivedCount + // const receivedMsg = msg.data.toString() + // expect(receivedMsg).to.eql(expectedMsg) + + receivedCount++ + + if (receivedCount >= count) { + const duration = new Date().getTime() - startTime + const opsPerSec = Math.floor(count / (duration / 1000)) + + console.log(`Send/Receive 100 messages took: ${duration} ms, ${opsPerSec} ops / s`) + + check() + } + } + + sub2 = (msg) => {} + + function check () { + if (++counter === 2) { 
+ done() + } + } + + auto({ + sub1: (cb) => ipfs1.pubsub.subscribe(topic, sub1, cb), + sub2: (cb) => ipfs2.pubsub.subscribe(topic, sub2, cb), + peers: ['sub1', 'sub2', (_, cb) => { + waitForPeers(ipfs1, topic, [ipfs2.peerId.id], cb) + }] + }, (err) => { + expect(err).to.not.exist() + startTime = new Date().getTime() + + times(count, (sendCount, cb) => { + const msgData = Buffer.from(msgBase + sendCount) + ipfs2.pubsub.publish(topic, msgData, cb) + }, check) + }) + }) + }) + }) +} diff --git a/js/src/pubsub/unsubscribe.js b/js/src/pubsub/unsubscribe.js new file mode 100644 index 000000000..cc3f5d793 --- /dev/null +++ b/js/src/pubsub/unsubscribe.js @@ -0,0 +1,67 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ['error', 8] */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const each = require('async/each') +const times = require('async/times') +const { getTopic } = require('./utils') +const { getDescribe, getIt } = require('../utils/mocha') + +const expect = chai.expect +chai.use(dirtyChai) + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.pubsub.unsubscribe', function () { + this.timeout(80 * 1000) + + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should subscribe and unsubscribe 10 times', (done) => { + const count = 10 + const someTopic = getTopic() + + times(count, (_, cb) => { + const handler = (msg) => {} + ipfs.pubsub.subscribe(someTopic, handler, (err) => cb(err, handler)) + }, (err, handlers) => { + expect(err).to.not.exist() + each( + handlers, + 
(handler, cb) => ipfs.pubsub.unsubscribe(someTopic, handler, cb), + (err) => { + expect(err).to.not.exist() + // Assert unsubscribe worked + ipfs.pubsub.ls((err, topics) => { + expect(err).to.not.exist() + expect(topics).to.eql([]) + done() + }) + } + ) + }) + }) + }) +} diff --git a/js/src/pubsub/utils.js b/js/src/pubsub/utils.js new file mode 100644 index 000000000..03c8d3468 --- /dev/null +++ b/js/src/pubsub/utils.js @@ -0,0 +1,44 @@ +'use strict' + +const hat = require('hat') + +function waitForPeers (ipfs, topic, peersToWait, callback) { + const checkPeers = () => { + ipfs.pubsub.peers(topic, (err, peers) => { + if (err) { + return callback(err) + } + + const missingPeers = peersToWait + .map((e) => peers.indexOf(e) !== -1) + .filter((e) => !e) + + if (missingPeers.length === 0) { + return callback() + } + + setTimeout(checkPeers, 10) + }) + } + + checkPeers() +} + +exports.waitForPeers = waitForPeers + +function makeCheck (n, done) { + let i = 0 + return (err) => { + if (err) { + return done(err) + } + + if (++i === n) { + done() + } + } +} + +exports.makeCheck = makeCheck + +exports.getTopic = () => 'pubsub-tests-' + hat() From fb8719b0d59e1af7e41a59920341f286e4409fac Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Fri, 8 Jun 2018 12:30:29 +0100 Subject: [PATCH 25/41] feat(swarm): modularise swarm BREAKING CHANGE: Consumers of this test suite now have fine grained control over what tests are run. Tests can now be skipped and "onlyed" (run only specific tests). This can be done on a test, command and sub-system level. See the updated usage guide for instructions: https://github.com/ipfs/interface-ipfs-core/blob/master/README.md#usage. This means that tests skips depending on implementation (e.g. go/js), environment (e.g. node/browser) or platform (e.g. macOS/linux/windows) that were previously present in this suite have been removed. 
Consumers of this library should add their own skips based on the implementation that's being tested and the environment/platform that the tests are running on. The following other breaking changes have been made: 1. The common object passed to test suites has changed. It must now be a function that returns a common object (same shape and functions as before). 2. The `ipfs.ls` tests (not MFS `ipfs.files.ls`) is now a root level suite. You'll need to import it and use like `tests.ls(createCommon)` to have those tests run. 3. The `generic` suite (an alias to `miscellaneous`) has been removed. See https://github.com/ipfs/interface-ipfs-core/pull/290 for more details. License: MIT Signed-off-by: Alan Shaw --- js/src/pubsub/subscribe.js | 2 +- js/src/swarm.js | 306 ------------------------------------ js/src/swarm/addrs.js | 56 +++++++ js/src/swarm/connect.js | 50 ++++++ js/src/swarm/disconnect.js | 50 ++++++ js/src/swarm/index.js | 12 ++ js/src/swarm/local-addrs.js | 54 +++++++ js/src/swarm/peers.js | 204 ++++++++++++++++++++++++ 8 files changed, 427 insertions(+), 307 deletions(-) delete mode 100644 js/src/swarm.js create mode 100644 js/src/swarm/addrs.js create mode 100644 js/src/swarm/connect.js create mode 100644 js/src/swarm/disconnect.js create mode 100644 js/src/swarm/index.js create mode 100644 js/src/swarm/local-addrs.js create mode 100644 js/src/swarm/peers.js diff --git a/js/src/pubsub/subscribe.js b/js/src/pubsub/subscribe.js index 30e984f2b..037d4f190 100644 --- a/js/src/pubsub/subscribe.js +++ b/js/src/pubsub/subscribe.js @@ -308,7 +308,7 @@ module.exports = (createCommon, options) => { }) }) - it('should receive multiple messages', function (done) { + it('should receive multiple messages', (done) => { const inbox1 = [] const inbox2 = [] const outbox = ['hello', 'world', 'this', 'is', 'pubsub'] diff --git a/js/src/swarm.js b/js/src/swarm.js deleted file mode 100644 index 5905412c3..000000000 --- a/js/src/swarm.js +++ /dev/null @@ -1,306 +0,0 @@ -/* 
eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ - -'use strict' - -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) -const series = require('async/series') -const multiaddr = require('multiaddr') -const PeerId = require('peer-id') -const os = require('os') -const path = require('path') -const hat = require('hat') -const { spawnNodes } = require('./utils/spawn') - -module.exports = (common) => { - describe('.swarm', function () { - this.timeout(80 * 1000) - - let ipfsA - let ipfsB - let factoryInstance - - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(100 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factoryInstance = factory - - spawnNodes(2, factory, (err, nodes) => { - expect(err).to.not.exist() - ipfsA = nodes[0] - ipfsB = nodes[1] - done() - }) - }) - }) - - after((done) => common.teardown(done)) - - let ipfsBId - - describe('callback API', function () { - this.timeout(80 * 1000) - - it('.connect', (done) => { - ipfsB.id((err, id) => { - expect(err).to.not.exist() - ipfsBId = id - const ipfsBAddr = id.addresses[0] - ipfsA.swarm.connect(ipfsBAddr, done) - }) - }) - - // for Identify to finish - it('time', (done) => setTimeout(done, 1500)) - - describe('.peers', () => { - beforeEach((done) => { - const ipfsBAddr = ipfsBId.addresses[0] - ipfsA.swarm.connect(ipfsBAddr, done) - }) - - it('default', (done) => { - ipfsA.swarm.peers((err, peers) => { - expect(err).to.not.exist() - expect(peers).to.have.length.above(0) - - const peer = peers[0] - - expect(peer).to.have.a.property('addr') - expect(multiaddr.isMultiaddr(peer.addr)).to.equal(true) - expect(peer).to.have.a.property('peer') - expect(PeerId.isPeerId(peer.peer)).to.equal(true) - expect(peer).to.not.have.a.property('latency') - - // only available in 0.4.5 - // 
expect(peer).to.have.a.property('muxer') - // expect(peer).to.not.have.a.property('streams') - - done() - }) - }) - - it('verbose', (done) => { - ipfsA.swarm.peers({verbose: true}, (err, peers) => { - expect(err).to.not.exist() - expect(peers).to.have.length.above(0) - - const peer = peers[0] - expect(peer).to.have.a.property('addr') - expect(multiaddr.isMultiaddr(peer.addr)).to.equal(true) - expect(peer).to.have.a.property('peer') - expect(peer).to.have.a.property('latency') - - // Only available in 0.4.5 - // expect(peer).to.have.a.property('muxer') - // expect(peer).to.have.a.property('streams') - - done() - }) - }) - - describe('Shows connected peers only once', () => { - function getConfig (addrs) { - addrs = Array.isArray(addrs) ? addrs : [addrs] - - return { - Addresses: { - Swarm: addrs, - API: '/ip4/127.0.0.1/tcp/0', - Gateway: '/ip4/127.0.0.1/tcp/0' - }, - Bootstrap: [], - Discovery: { - MDNS: { - Enabled: false - } - } - } - } - - function getRepoPath () { - return path.join(os.tmpdir(), '.ipfs-' + hat()) - } - - it('Connecting two peers with one address each', (done) => { - let nodeA - let nodeB - let nodeBAddress - const addresses = ['/ip4/127.0.0.1/tcp/0'] - const config = getConfig(addresses) - series([ - (cb) => { - factoryInstance.spawnNode(getRepoPath(), config, (err, node) => { - expect(err).to.not.exist() - nodeA = node - cb() - }) - }, - (cb) => { - factoryInstance.spawnNode(getRepoPath(), config, (err, node) => { - expect(err).to.not.exist() - nodeB = node - cb() - }) - }, - (cb) => { - nodeB.id((err, info) => { - expect(err).to.not.exist() - nodeBAddress = info.addresses[0] - cb() - }) - }, - (cb) => nodeA.swarm.connect(nodeBAddress, cb), - (cb) => setTimeout(cb, 1000), // time for identify - (cb) => { - nodeA.swarm.peers((err, peers) => { - expect(err).to.not.exist() - expect(peers).to.have.length(1) - cb() - }) - }, - (cb) => { - nodeB.swarm.peers((err, peers) => { - expect(err).to.not.exist() - expect(peers).to.have.length(1) - cb() - }) - 
} - ], done) - }) - - it('Connecting two peers with two addresses each', (done) => { - let nodeA - let nodeB - let nodeBAddress - - // TODO: Change to port 0, needs: https://github.com/ipfs/interface-ipfs-core/issues/152 - const configA = getConfig([ - '/ip4/127.0.0.1/tcp/16543', - '/ip4/127.0.0.1/tcp/16544' - ]) - const configB = getConfig([ - '/ip4/127.0.0.1/tcp/26545', - '/ip4/127.0.0.1/tcp/26546' - ]) - series([ - (cb) => { - factoryInstance.spawnNode(getRepoPath(), configA, (err, node) => { - expect(err).to.not.exist() - nodeA = node - cb() - }) - }, - (cb) => { - factoryInstance.spawnNode(getRepoPath(), configB, (err, node) => { - expect(err).to.not.exist() - nodeB = node - cb() - }) - }, - (cb) => { - nodeB.id((err, info) => { - expect(err).to.not.exist() - nodeBAddress = info.addresses[0] - cb() - }) - }, - (cb) => nodeA.swarm.connect(nodeBAddress, cb), - (cb) => setTimeout(cb, 1000), // time for identify - (cb) => { - nodeA.swarm.peers((err, peers) => { - expect(err).to.not.exist() - expect(peers).to.have.length(1) - cb() - }) - }, - (cb) => { - nodeB.swarm.peers((err, peers) => { - expect(err).to.not.exist() - expect(peers).to.have.length(1) - cb() - }) - } - ], done) - }) - }) - }) - - it('.addrs', (done) => { - ipfsA.swarm.addrs((err, multiaddrs) => { - expect(err).to.not.exist() - expect(multiaddrs).to.not.be.empty() - expect(multiaddrs).to.be.an('array') - expect(multiaddrs[0].constructor.name).to.be.eql('PeerInfo') - done() - }) - }) - - it('.localAddrs', (done) => { - ipfsA.swarm.localAddrs((err, multiaddrs) => { - expect(err).to.not.exist() - expect(multiaddrs).to.have.length.above(0) - done() - }) - }) - - it('.disconnect', (done) => { - ipfsB.id((err, id) => { - expect(err).to.not.exist() - const ipfsBAddr = id.addresses[0] - ipfsA.swarm.disconnect(ipfsBAddr, done) - }) - }) - }) - - describe('promise API', function () { - this.timeout(80 * 1000) - - it('.connect', () => { - return ipfsB.id() - .then((id) => { - const ipfsBAddr = id.addresses[0] 
- return ipfsA.swarm.connect(ipfsBAddr) - }) - }) - - // for Identify to finish - it('time', (done) => { - setTimeout(done, 1500) - }) - - it('.peers', () => { - return ipfsA.swarm.peers().then((multiaddrs) => { - expect(multiaddrs).to.have.length.above(0) - }) - }) - - it('.addrs', () => { - return ipfsA.swarm.addrs().then((multiaddrs) => { - expect(multiaddrs).to.have.length.above(0) - }) - }) - - it('.localAddrs', () => { - return ipfsA.swarm.localAddrs().then((multiaddrs) => { - expect(multiaddrs).to.have.length.above(0) - }) - }) - - it('.disconnect', () => { - return ipfsB.id() - .then((id) => { - const ipfsBAddr = id.addresses[0] - return ipfsA.swarm.disconnect(ipfsBAddr) - }) - }) - }) - }) -} diff --git a/js/src/swarm/addrs.js b/js/src/swarm/addrs.js new file mode 100644 index 000000000..5d350b088 --- /dev/null +++ b/js/src/swarm/addrs.js @@ -0,0 +1,56 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const { spawnNodes } = require('../utils/spawn') +const { getDescribe, getIt } = require('../utils/mocha') + +const expect = chai.expect +chai.use(dirtyChai) + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.swarm.addrs', function () { + this.timeout(80 * 1000) + + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(100 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + + spawnNodes(2, factory, (err, nodes) => { + expect(err).to.not.exist() + ipfs = nodes[0] + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should get a list of node addresses', (done) => { + ipfs.swarm.addrs((err, multiaddrs) => { + expect(err).to.not.exist() + expect(multiaddrs).to.not.be.empty() + expect(multiaddrs).to.be.an('array') + 
expect(multiaddrs[0].constructor.name).to.be.eql('PeerInfo') + done() + }) + }) + + it('should get a list of node addresses (promised)', () => { + return ipfs.swarm.addrs().then((multiaddrs) => { + expect(multiaddrs).to.have.length.above(0) + }) + }) + }) +} diff --git a/js/src/swarm/connect.js b/js/src/swarm/connect.js new file mode 100644 index 000000000..6c23847bd --- /dev/null +++ b/js/src/swarm/connect.js @@ -0,0 +1,50 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const { spawnNodesWithId } = require('../utils/spawn') +const { getDescribe, getIt } = require('../utils/mocha') + +const expect = chai.expect +chai.use(dirtyChai) + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.swarm.connect', function () { + this.timeout(80 * 1000) + + let ipfsA + let ipfsB + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(100 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + + spawnNodesWithId(2, factory, (err, nodes) => { + expect(err).to.not.exist() + ipfsA = nodes[0] + ipfsB = nodes[1] + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should connect to a peer', (done) => { + ipfsA.swarm.connect(ipfsB.peerId.addresses[0], done) + }) + + it('should connect to a peer (promised)', () => { + return ipfsA.swarm.connect(ipfsB.peerId.addresses[0]) + }) + }) +} diff --git a/js/src/swarm/disconnect.js b/js/src/swarm/disconnect.js new file mode 100644 index 000000000..74742eeb8 --- /dev/null +++ b/js/src/swarm/disconnect.js @@ -0,0 +1,50 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const { spawnNodesWithId } = require('../utils/spawn') +const { getDescribe, getIt } = 
require('../utils/mocha') + +const expect = chai.expect +chai.use(dirtyChai) + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.swarm.disconnect', function () { + this.timeout(80 * 1000) + + let ipfsA + let ipfsB + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(100 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + + spawnNodesWithId(2, factory, (err, nodes) => { + expect(err).to.not.exist() + ipfsA = nodes[0] + ipfsB = nodes[1] + ipfsA.swarm.connect(ipfsB.peerId.addresses[0], done) + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should disconnect from a peer', (done) => { + ipfsA.swarm.disconnect(ipfsB.peerId.addresses[0], done) + }) + + it('should disconnect from a peer (promised)', () => { + return ipfsA.swarm.disconnect(ipfsB.peerId.addresses[0]) + }) + }) +} diff --git a/js/src/swarm/index.js b/js/src/swarm/index.js new file mode 100644 index 000000000..23bef9e36 --- /dev/null +++ b/js/src/swarm/index.js @@ -0,0 +1,12 @@ +'use strict' +const { createSuite } = require('../utils/suite') + +const tests = { + connect: require('./connect'), + peers: require('./peers'), + addrs: require('./addrs'), + localAddrs: require('./local-addrs'), + disconnect: require('./disconnect') +} + +module.exports = createSuite(tests) diff --git a/js/src/swarm/local-addrs.js b/js/src/swarm/local-addrs.js new file mode 100644 index 000000000..029d8d64d --- /dev/null +++ b/js/src/swarm/local-addrs.js @@ -0,0 +1,54 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const { spawnNodes } = require('../utils/spawn') +const { getDescribe, getIt } = require('../utils/mocha') + +const expect = chai.expect +chai.use(dirtyChai) + +module.exports = (createCommon, options) => { + 
const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.swarm.localAddrs', function () { + this.timeout(80 * 1000) + + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(100 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + + spawnNodes(2, factory, (err, nodes) => { + expect(err).to.not.exist() + ipfs = nodes[0] + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should list local addresses the node is listening on', (done) => { + ipfs.swarm.localAddrs((err, multiaddrs) => { + expect(err).to.not.exist() + expect(multiaddrs).to.have.length.above(0) + done() + }) + }) + + it('should list local addresses the node is listening on (promised)', () => { + return ipfs.swarm.localAddrs().then((multiaddrs) => { + expect(multiaddrs).to.have.length.above(0) + }) + }) + }) +} diff --git a/js/src/swarm/peers.js b/js/src/swarm/peers.js new file mode 100644 index 000000000..c9b6811da --- /dev/null +++ b/js/src/swarm/peers.js @@ -0,0 +1,204 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const auto = require('async/auto') +const multiaddr = require('multiaddr') +const PeerId = require('peer-id') +const os = require('os') +const path = require('path') +const hat = require('hat') +const { spawnNodesWithId } = require('../utils/spawn') +const { getDescribe, getIt } = require('../utils/mocha') + +const expect = chai.expect +chai.use(dirtyChai) + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.swarm', function () { + this.timeout(80 * 1000) + + let ipfsA + let ipfsB + let ipfsFactory + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // 
timeout for the before step + this.timeout(100 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + ipfsFactory = factory + + spawnNodesWithId(2, factory, (err, nodes) => { + expect(err).to.not.exist() + ipfsA = nodes[0] + ipfsB = nodes[1] + ipfsA.swarm.connect(ipfsB.peerId.addresses[0], done) + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should list peers this node is connected to', (done) => { + ipfsA.swarm.peers((err, peers) => { + expect(err).to.not.exist() + expect(peers).to.have.length.above(0) + + const peer = peers[0] + + expect(peer).to.have.a.property('addr') + expect(multiaddr.isMultiaddr(peer.addr)).to.equal(true) + expect(peer).to.have.a.property('peer') + expect(PeerId.isPeerId(peer.peer)).to.equal(true) + expect(peer).to.not.have.a.property('latency') + + // only available in 0.4.5 + // expect(peer).to.have.a.property('muxer') + // expect(peer).to.not.have.a.property('streams') + + done() + }) + }) + + it('should list peers this node is connected to (promised)', () => { + return ipfsA.swarm.peers().then((peers) => { + expect(peers).to.have.length.above(0) + + const peer = peers[0] + + expect(peer).to.have.a.property('addr') + expect(multiaddr.isMultiaddr(peer.addr)).to.equal(true) + expect(peer).to.have.a.property('peer') + expect(PeerId.isPeerId(peer.peer)).to.equal(true) + expect(peer).to.not.have.a.property('latency') + + // only available in 0.4.5 + // expect(peer).to.have.a.property('muxer') + // expect(peer).to.not.have.a.property('streams') + }) + }) + + it('should list peers this node is connected to with verbose option', (done) => { + ipfsA.swarm.peers({ verbose: true }, (err, peers) => { + expect(err).to.not.exist() + expect(peers).to.have.length.above(0) + + const peer = peers[0] + expect(peer).to.have.a.property('addr') + expect(multiaddr.isMultiaddr(peer.addr)).to.equal(true) + expect(peer).to.have.a.property('peer') + expect(peer).to.have.a.property('latency') + + // Only available in 0.4.5 + 
// expect(peer).to.have.a.property('muxer') + // expect(peer).to.have.a.property('streams') + + done() + }) + }) + + function getConfig (addrs) { + addrs = Array.isArray(addrs) ? addrs : [addrs] + + return { + Addresses: { + Swarm: addrs, + API: '/ip4/127.0.0.1/tcp/0', + Gateway: '/ip4/127.0.0.1/tcp/0' + }, + Bootstrap: [], + Discovery: { + MDNS: { + Enabled: false + } + } + } + } + + function getRepoPath () { + return path.join(os.tmpdir(), '.ipfs-' + hat()) + } + + it('should list peers only once', (done) => { + const config = getConfig(['/ip4/127.0.0.1/tcp/0']) + + auto({ + nodeA: (cb) => ipfsFactory.spawnNode(getRepoPath(), config, cb), + nodeB: ['nodeA', (_, cb) => { + ipfsFactory.spawnNode(getRepoPath(), config, cb) + }], + nodeBAddress: ['nodeB', (res, cb) => { + res.nodeB.id((err, info) => { + if (err) return cb(err) + cb(null, info.addresses[0]) + }) + }], + connectA2B: ['nodeA', 'nodeBAddress', (res, cb) => { + res.nodeA.swarm.connect(res.nodeBAddress, cb) + }], + // time for identify + wait: ['connectA2B', (_, cb) => setTimeout(cb, 1000)], + nodeAPeers: ['nodeA', 'wait', (res, cb) => { + res.nodeA.swarm.peers(cb) + }], + nodeBPeers: ['nodeB', 'wait', (res, cb) => { + res.nodeB.swarm.peers(cb) + }] + }, (err, res) => { + expect(err).to.not.exist() + expect(res.nodeAPeers).to.have.length(1) + expect(res.nodeBPeers).to.have.length(1) + done() + }) + }) + + it('should list peers only once even if they have multiple addresses', (done) => { + // TODO: Change to port 0, needs: https://github.com/ipfs/interface-ipfs-core/issues/152 + const configA = getConfig([ + '/ip4/127.0.0.1/tcp/16543', + '/ip4/127.0.0.1/tcp/16544' + ]) + const configB = getConfig([ + '/ip4/127.0.0.1/tcp/26545', + '/ip4/127.0.0.1/tcp/26546' + ]) + + auto({ + nodeA: (cb) => ipfsFactory.spawnNode(getRepoPath(), configA, cb), + nodeB: ['nodeA', (_, cb) => { + ipfsFactory.spawnNode(getRepoPath(), configB, cb) + }], + nodeBAddress: ['nodeB', (res, cb) => { + res.nodeB.id((err, info) => { + if 
(err) return cb(err) + cb(null, info.addresses[0]) + }) + }], + connectA2B: ['nodeA', 'nodeBAddress', (res, cb) => { + res.nodeA.swarm.connect(res.nodeBAddress, cb) + }], + // time for identify + wait: ['connectA2B', (_, cb) => setTimeout(cb, 1000)], + nodeAPeers: ['nodeA', 'wait', (res, cb) => { + res.nodeA.swarm.peers(cb) + }], + nodeBPeers: ['nodeB', 'wait', (res, cb) => { + res.nodeB.swarm.peers(cb) + }] + }, (err, res) => { + expect(err).to.not.exist() + expect(res.nodeAPeers).to.have.length(1) + expect(res.nodeBPeers).to.have.length(1) + done() + }) + }) + }) +} From b12fb09587a0c0335ed32356bef60bb6abae476c Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Fri, 8 Jun 2018 13:15:53 +0100 Subject: [PATCH 26/41] chore: fix warning for new Buffer License: MIT Signed-off-by: Alan Shaw --- js/src/block/get.js | 4 ++-- js/src/block/put.js | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/js/src/block/get.js b/js/src/block/get.js index a077f614c..4462d457f 100644 --- a/js/src/block/get.js +++ b/js/src/block/get.js @@ -45,7 +45,7 @@ module.exports = (createCommon, options) => { ipfs.block.get(cid, (err, block) => { expect(err).to.not.exist() - expect(block.data).to.eql(new Buffer('blorb')) + expect(block.data).to.eql(Buffer.from('blorb')) expect(block.cid.multihash).to.eql(cid.multihash) done() }) @@ -54,7 +54,7 @@ module.exports = (createCommon, options) => { it('should get by CID in string', (done) => { ipfs.block.get(multihash.toB58String(hash), (err, block) => { expect(err).to.not.exist() - expect(block.data).to.eql(new Buffer('blorb')) + expect(block.data).to.eql(Buffer.from('blorb')) expect(block.cid.multihash).to.eql(hash) done() }) diff --git a/js/src/block/put.js b/js/src/block/put.js index ad4fbbe41..357544715 100644 --- a/js/src/block/put.js +++ b/js/src/block/put.js @@ -85,7 +85,7 @@ module.exports = (createCommon, options) => { ipfs.block.put(b, (err, block) => { expect(err).to.not.exist() - expect(block.data).to.eql(new Buffer('blorb')) + 
expect(block.data).to.eql(Buffer.from('blorb')) expect(block.cid.multihash).to.eql(multihash.fromB58String(expectedHash)) done() }) From b969416eaee79d967fa09be0c255e98a36af4e07 Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Fri, 8 Jun 2018 15:50:48 +0100 Subject: [PATCH 27/41] fix: import small file fixture License: MIT Signed-off-by: Alan Shaw --- js/src/files/cat-readable-stream.js | 1 + 1 file changed, 1 insertion(+) diff --git a/js/src/files/cat-readable-stream.js b/js/src/files/cat-readable-stream.js index 44a5fbbe4..2bb5e08fa 100644 --- a/js/src/files/cat-readable-stream.js +++ b/js/src/files/cat-readable-stream.js @@ -36,6 +36,7 @@ module.exports = (createCommon, options) => { }) before((done) => ipfs.files.add(fixtures.bigFile.data, done)) + before((done) => ipfs.files.add(fixtures.smallFile.data, done)) after((done) => common.teardown(done)) From a8492f2a7164c8768da0517ed287b9b46f9438c4 Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Wed, 13 Jun 2018 14:08:00 +0200 Subject: [PATCH 28/41] fix: swarm peers suite name License: MIT Signed-off-by: Alan Shaw --- js/src/swarm/peers.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/js/src/swarm/peers.js b/js/src/swarm/peers.js index c9b6811da..725d28cc4 100644 --- a/js/src/swarm/peers.js +++ b/js/src/swarm/peers.js @@ -20,7 +20,7 @@ module.exports = (createCommon, options) => { const it = getIt(options) const common = createCommon() - describe('.swarm', function () { + describe('.swarm.peers', function () { this.timeout(80 * 1000) let ipfsA From 3eda55d9662de4add78a94eb3840d206b749b57e Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Wed, 13 Jun 2018 14:10:12 +0200 Subject: [PATCH 29/41] fix: spawn only as many nodes as needed License: MIT Signed-off-by: Alan Shaw --- js/src/swarm/addrs.js | 6 ++---- js/src/swarm/local-addrs.js | 6 ++---- 2 files changed, 4 insertions(+), 8 deletions(-) diff --git a/js/src/swarm/addrs.js b/js/src/swarm/addrs.js index 5d350b088..24c08a292 100644 --- 
a/js/src/swarm/addrs.js +++ b/js/src/swarm/addrs.js @@ -3,7 +3,6 @@ const chai = require('chai') const dirtyChai = require('dirty-chai') -const { spawnNodes } = require('../utils/spawn') const { getDescribe, getIt } = require('../utils/mocha') const expect = chai.expect @@ -26,10 +25,9 @@ module.exports = (createCommon, options) => { common.setup((err, factory) => { expect(err).to.not.exist() - - spawnNodes(2, factory, (err, nodes) => { + factory.spawnNode((err, node) => { expect(err).to.not.exist() - ipfs = nodes[0] + ipfs = node done() }) }) diff --git a/js/src/swarm/local-addrs.js b/js/src/swarm/local-addrs.js index 029d8d64d..ce7988c36 100644 --- a/js/src/swarm/local-addrs.js +++ b/js/src/swarm/local-addrs.js @@ -3,7 +3,6 @@ const chai = require('chai') const dirtyChai = require('dirty-chai') -const { spawnNodes } = require('../utils/spawn') const { getDescribe, getIt } = require('../utils/mocha') const expect = chai.expect @@ -26,10 +25,9 @@ module.exports = (createCommon, options) => { common.setup((err, factory) => { expect(err).to.not.exist() - - spawnNodes(2, factory, (err, nodes) => { + factory.spawnNode((err, node) => { expect(err).to.not.exist() - ipfs = nodes[0] + ipfs = node done() }) }) From 16158315c38f51712fbdf7de2f6afe8b06af406d Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Tue, 19 Jun 2018 20:49:19 +0100 Subject: [PATCH 30/41] chore: remove safe-buffer License: MIT Signed-off-by: Alan Shaw --- js/src/block/get.js | 1 - js/src/block/put.js | 1 - 2 files changed, 2 deletions(-) diff --git a/js/src/block/get.js b/js/src/block/get.js index 4462d457f..cb6b3ed26 100644 --- a/js/src/block/get.js +++ b/js/src/block/get.js @@ -5,7 +5,6 @@ const chai = require('chai') const dirtyChai = require('dirty-chai') const multihash = require('multihashes') const CID = require('cids') -const Buffer = require('safe-buffer').Buffer const auto = require('async/auto') const { getDescribe, getIt } = require('../utils/mocha') diff --git a/js/src/block/put.js 
b/js/src/block/put.js index 357544715..2b40d7f0c 100644 --- a/js/src/block/put.js +++ b/js/src/block/put.js @@ -6,7 +6,6 @@ const dirtyChai = require('dirty-chai') const Block = require('ipfs-block') const multihash = require('multihashes') const CID = require('cids') -const Buffer = require('safe-buffer').Buffer const { getDescribe, getIt } = require('../utils/mocha') const expect = chai.expect From 5567ddf96954ba9c2de9b555d92895a370241d35 Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Tue, 19 Jun 2018 21:12:58 +0100 Subject: [PATCH 31/41] chore: move expect into mocha util License: MIT Signed-off-by: Alan Shaw --- js/src/block/get.js | 7 +------ js/src/block/put.js | 7 +------ js/src/block/stat.js | 7 +------ js/src/bootstrap/add.js | 7 +------ js/src/bootstrap/list.js | 7 +------ js/src/bootstrap/rm.js | 7 +------ js/src/config/get.js | 7 +------ js/src/config/replace.js | 7 +------ js/src/config/set.js | 7 +------ js/src/dag/get.js | 7 +------ js/src/dag/put.js | 7 +------ js/src/dag/tree.js | 7 +------ js/src/dht/findpeer.js | 7 +------ js/src/dht/findprovs.js | 7 +------ js/src/dht/get.js | 7 +------ js/src/dht/provide.js | 7 +------ js/src/dht/put.js | 7 +------ js/src/dht/query.js | 7 +------ js/src/files/add-pull-stream.js | 7 +------ js/src/files/add-readable-stream.js | 7 +------ js/src/files/add.js | 7 +------ js/src/files/cat-pull-stream.js | 7 +------ js/src/files/cat-readable-stream.js | 7 +------ js/src/files/cat.js | 7 +------ js/src/files/cp.js | 7 +------ js/src/files/flush.js | 7 +------ js/src/files/get-pull-stream.js | 7 +------ js/src/files/get-readable-stream.js | 7 +------ js/src/files/get.js | 7 +------ js/src/files/ls.js | 7 +------ js/src/files/mkdir.js | 7 +------ js/src/files/mv.js | 7 +------ js/src/files/read.js | 7 +------ js/src/files/rm.js | 7 +------ js/src/files/stat.js | 7 +------ js/src/files/write.js | 7 +------ js/src/key/export.js | 7 +------ js/src/key/gen.js | 7 +------ js/src/key/import.js | 7 +------ js/src/key/list.js 
| 7 +------ js/src/key/rm.js | 7 +------ js/src/ls/ls-pull-stream.js | 7 +------ js/src/ls/ls-readable-stream.js | 7 +------ js/src/ls/ls.js | 7 +------ js/src/miscellaneous/dns.js | 7 +------ js/src/miscellaneous/id.js | 7 +------ js/src/miscellaneous/stop.js | 7 +------ js/src/miscellaneous/version.js | 7 +------ js/src/object/data.js | 7 +------ js/src/object/get.js | 7 +------ js/src/object/links.js | 7 +------ js/src/object/new.js | 7 +------ js/src/object/patch/add-link.js | 7 +------ js/src/object/patch/append-data.js | 7 +------ js/src/object/patch/rm-link.js | 7 +------ js/src/object/patch/set-data.js | 7 +------ js/src/object/put.js | 7 +------ js/src/object/stat.js | 7 +------ js/src/pin/add.js | 7 +------ js/src/pin/ls.js | 7 +------ js/src/pin/rm.js | 7 +------ js/src/ping/ping-pull-stream.js | 7 +------ js/src/ping/ping-readable-stream.js | 7 +------ js/src/ping/ping.js | 7 +------ js/src/ping/utils.js | 6 +----- js/src/pubsub/ls.js | 7 +------ js/src/pubsub/peers.js | 7 +------ js/src/pubsub/publish.js | 8 +------- js/src/pubsub/subscribe.js | 7 +------ js/src/pubsub/unsubscribe.js | 8 +------- js/src/repo/gc.js | 7 +------ js/src/repo/stat.js | 7 +------ js/src/repo/version.js | 7 +------ js/src/stats/bitswap.js | 7 +------ js/src/stats/bw-pull-stream.js | 7 +------ js/src/stats/bw-readable-stream.js | 7 +------ js/src/stats/bw.js | 7 +------ js/src/stats/repo.js | 7 +------ js/src/stats/utils.js | 2 +- js/src/swarm/addrs.js | 7 +------ js/src/swarm/connect.js | 7 +------ js/src/swarm/disconnect.js | 7 +------ js/src/swarm/local-addrs.js | 7 +------ js/src/swarm/peers.js | 7 +------ js/src/types.js | 7 +------ js/src/util.js | 7 +------ js/src/utils/mocha.js | 7 +++++++ 87 files changed, 93 insertions(+), 512 deletions(-) diff --git a/js/src/block/get.js b/js/src/block/get.js index cb6b3ed26..dbacdcb95 100644 --- a/js/src/block/get.js +++ b/js/src/block/get.js @@ -1,15 +1,10 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const 
dirtyChai = require('dirty-chai') const multihash = require('multihashes') const CID = require('cids') const auto = require('async/auto') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/block/put.js b/js/src/block/put.js index 2b40d7f0c..e3f5ed8ab 100644 --- a/js/src/block/put.js +++ b/js/src/block/put.js @@ -1,15 +1,10 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const Block = require('ipfs-block') const multihash = require('multihashes') const CID = require('cids') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/block/stat.js b/js/src/block/stat.js index 49f3e320a..a420dc286 100644 --- a/js/src/block/stat.js +++ b/js/src/block/stat.js @@ -1,14 +1,9 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const CID = require('cids') const auto = require('async/auto') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/bootstrap/add.js b/js/src/bootstrap/add.js index a37f9efcd..96578d24d 100644 --- a/js/src/bootstrap/add.js +++ b/js/src/bootstrap/add.js @@ -1,12 +1,7 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect 
-chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') const invalidArg = 'this/Is/So/Invalid/' const validIp4 = '/ip4/104.236.176.52/tcp/4001/ipfs/QmSoLnSGccFuZQJzRadHn95W2CrSFmZuTdDWP8HXaHca9z' diff --git a/js/src/bootstrap/list.js b/js/src/bootstrap/list.js index 7c0093b74..31bfe3f64 100644 --- a/js/src/bootstrap/list.js +++ b/js/src/bootstrap/list.js @@ -1,12 +1,7 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/bootstrap/rm.js b/js/src/bootstrap/rm.js index b7f2e0233..24f6248f8 100644 --- a/js/src/bootstrap/rm.js +++ b/js/src/bootstrap/rm.js @@ -1,12 +1,7 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') const invalidArg = 'this/Is/So/Invalid/' diff --git a/js/src/config/get.js b/js/src/config/get.js index 00e4b2637..b6730af3d 100644 --- a/js/src/config/get.js +++ b/js/src/config/get.js @@ -1,12 +1,7 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/config/replace.js b/js/src/config/replace.js index b40684218..8620ea099 100644 --- a/js/src/config/replace.js +++ b/js/src/config/replace.js @@ -1,12 +1,7 @@ /* eslint-env mocha */ 'use 
strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/config/set.js b/js/src/config/set.js index 1890bab77..6570413a9 100644 --- a/js/src/config/set.js +++ b/js/src/config/set.js @@ -1,12 +1,7 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/dag/get.js b/js/src/dag/get.js index a06c17dff..399c64107 100644 --- a/js/src/dag/get.js +++ b/js/src/dag/get.js @@ -1,17 +1,12 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const { series, eachSeries } = require('async') const dagPB = require('ipld-dag-pb') const DAGNode = dagPB.DAGNode const dagCBOR = require('ipld-dag-cbor') const { spawnNodeWithId } = require('../utils/spawn') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/dag/put.js b/js/src/dag/put.js index 46605a7b8..b01621cfe 100644 --- a/js/src/dag/put.js +++ b/js/src/dag/put.js @@ -1,17 +1,12 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const dagPB = require('ipld-dag-pb') const DAGNode = dagPB.DAGNode const dagCBOR = require('ipld-dag-cbor') const CID = require('cids') const { 
spawnNodeWithId } = require('../utils/spawn') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/dag/tree.js b/js/src/dag/tree.js index 78d85fc28..d0a6ed4a6 100644 --- a/js/src/dag/tree.js +++ b/js/src/dag/tree.js @@ -1,16 +1,11 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const { series, eachSeries } = require('async') const dagPB = require('ipld-dag-pb') const dagCBOR = require('ipld-dag-cbor') const { spawnNodeWithId } = require('../utils/spawn') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/dht/findpeer.js b/js/src/dht/findpeer.js index 962bfade5..817742d66 100644 --- a/js/src/dht/findpeer.js +++ b/js/src/dht/findpeer.js @@ -1,13 +1,8 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const { spawnNodesWithId } = require('../utils/spawn') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/dht/findprovs.js b/js/src/dht/findprovs.js index 0205a88f3..b15262254 100644 --- a/js/src/dht/findprovs.js +++ b/js/src/dht/findprovs.js @@ -1,15 +1,10 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const waterfall = require('async/waterfall') const CID = require('cids') const { spawnNodesWithId } = 
require('../utils/spawn') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/dht/get.js b/js/src/dht/get.js index 6cffd201f..317ace930 100644 --- a/js/src/dht/get.js +++ b/js/src/dht/get.js @@ -1,14 +1,9 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const waterfall = require('async/waterfall') const { spawnNodesWithId } = require('../utils/spawn') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/dht/provide.js b/js/src/dht/provide.js index eb79b02b6..d26e9b5fc 100644 --- a/js/src/dht/provide.js +++ b/js/src/dht/provide.js @@ -1,13 +1,8 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const CID = require('cids') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/dht/put.js b/js/src/dht/put.js index c315d87fc..df07c2310 100644 --- a/js/src/dht/put.js +++ b/js/src/dht/put.js @@ -1,12 +1,7 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git 
a/js/src/dht/query.js b/js/src/dht/query.js index 411bef39c..c0ee95c15 100644 --- a/js/src/dht/query.js +++ b/js/src/dht/query.js @@ -1,13 +1,8 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const { spawnNodesWithId } = require('../utils/spawn') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/files/add-pull-stream.js b/js/src/files/add-pull-stream.js index 1d6782b46..e6a3c2611 100644 --- a/js/src/files/add-pull-stream.js +++ b/js/src/files/add-pull-stream.js @@ -1,14 +1,9 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const { fixtures } = require('./utils') const pull = require('pull-stream') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/files/add-readable-stream.js b/js/src/files/add-readable-stream.js index 27d84a223..a3279ee42 100644 --- a/js/src/files/add-readable-stream.js +++ b/js/src/files/add-readable-stream.js @@ -1,13 +1,8 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const { fixtures } = require('./utils') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/files/add.js b/js/src/files/add.js index 8ffb20677..56c2aed34 100644 --- a/js/src/files/add.js +++ b/js/src/files/add.js @@ -1,17 +1,12 
@@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const { fixtures } = require('./utils') const Readable = require('readable-stream').Readable const pull = require('pull-stream') const path = require('path') const expectTimeout = require('../utils/expect-timeout') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/files/cat-pull-stream.js b/js/src/files/cat-pull-stream.js index 7d067dd23..fb0e312f3 100644 --- a/js/src/files/cat-pull-stream.js +++ b/js/src/files/cat-pull-stream.js @@ -1,14 +1,9 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const { fixtures } = require('./utils') const pull = require('pull-stream') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/files/cat-readable-stream.js b/js/src/files/cat-readable-stream.js index 2bb5e08fa..25e49d5c8 100644 --- a/js/src/files/cat-readable-stream.js +++ b/js/src/files/cat-readable-stream.js @@ -1,14 +1,9 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const { fixtures } = require('./utils') const bl = require('bl') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/files/cat.js b/js/src/files/cat.js index ab0da7041..1d54c5672 100644 --- a/js/src/files/cat.js 
+++ b/js/src/files/cat.js @@ -1,16 +1,11 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const { fixtures } = require('./utils') const bs58 = require('bs58') const parallel = require('async/parallel') const CID = require('cids') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/files/cp.js b/js/src/files/cp.js index d38533994..dabd764cb 100644 --- a/js/src/files/cp.js +++ b/js/src/files/cp.js @@ -1,14 +1,9 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') const series = require('async/series') const hat = require('hat') -const dirtyChai = require('dirty-chai') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/files/flush.js b/js/src/files/flush.js index 638ca6063..99ffb6c0d 100644 --- a/js/src/files/flush.js +++ b/js/src/files/flush.js @@ -1,14 +1,9 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') const series = require('async/series') const hat = require('hat') -const dirtyChai = require('dirty-chai') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/files/get-pull-stream.js b/js/src/files/get-pull-stream.js index b87b2591f..149dcd070 100644 --- a/js/src/files/get-pull-stream.js +++ b/js/src/files/get-pull-stream.js @@ -1,14 +1,9 @@ /* eslint-env mocha */ 'use strict' -const chai = 
require('chai') -const dirtyChai = require('dirty-chai') const { fixtures } = require('./utils') const pull = require('pull-stream') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/files/get-readable-stream.js b/js/src/files/get-readable-stream.js index 55255cdd7..504586c90 100644 --- a/js/src/files/get-readable-stream.js +++ b/js/src/files/get-readable-stream.js @@ -1,15 +1,10 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const { fixtures } = require('./utils') const concat = require('concat-stream') const through = require('through2') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/files/get.js b/js/src/files/get.js index 0b0c439fc..ee58fd7c0 100644 --- a/js/src/files/get.js +++ b/js/src/files/get.js @@ -1,16 +1,11 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const { fixtures } = require('./utils') const bs58 = require('bs58') const parallel = require('async/parallel') const series = require('async/series') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/files/ls.js b/js/src/files/ls.js index dbfd50637..17618739c 100644 --- a/js/src/files/ls.js +++ b/js/src/files/ls.js @@ -1,14 +1,9 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') const 
series = require('async/series') const hat = require('hat') -const dirtyChai = require('dirty-chai') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/files/mkdir.js b/js/src/files/mkdir.js index a9ce54f8e..dc9fac8e5 100644 --- a/js/src/files/mkdir.js +++ b/js/src/files/mkdir.js @@ -1,13 +1,8 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') const hat = require('hat') -const dirtyChai = require('dirty-chai') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/files/mv.js b/js/src/files/mv.js index b0727c281..2981bd344 100644 --- a/js/src/files/mv.js +++ b/js/src/files/mv.js @@ -1,14 +1,9 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') const series = require('async/series') const hat = require('hat') -const dirtyChai = require('dirty-chai') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/files/read.js b/js/src/files/read.js index 82b48a6ce..ac1eb8ff9 100644 --- a/js/src/files/read.js +++ b/js/src/files/read.js @@ -1,14 +1,9 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') const series = require('async/series') const hat = require('hat') -const dirtyChai = require('dirty-chai') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = 
require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/files/rm.js b/js/src/files/rm.js index 5e002415f..79757faf8 100644 --- a/js/src/files/rm.js +++ b/js/src/files/rm.js @@ -1,14 +1,9 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') const series = require('async/series') const hat = require('hat') -const dirtyChai = require('dirty-chai') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/files/stat.js b/js/src/files/stat.js index 62aea64be..4d59e9e91 100644 --- a/js/src/files/stat.js +++ b/js/src/files/stat.js @@ -1,15 +1,10 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') const series = require('async/series') const hat = require('hat') const { fixtures } = require('./utils') -const dirtyChai = require('dirty-chai') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/files/write.js b/js/src/files/write.js index 0e9bb65f2..729d789f6 100644 --- a/js/src/files/write.js +++ b/js/src/files/write.js @@ -1,13 +1,8 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') const hat = require('hat') -const dirtyChai = require('dirty-chai') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/key/export.js b/js/src/key/export.js index 2d43b4412..557d323dc 100644 --- 
a/js/src/key/export.js +++ b/js/src/key/export.js @@ -1,13 +1,8 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const hat = require('hat') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/key/gen.js b/js/src/key/gen.js index a1648576b..355123b41 100644 --- a/js/src/key/gen.js +++ b/js/src/key/gen.js @@ -1,13 +1,8 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const hat = require('hat') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/key/import.js b/js/src/key/import.js index 90ce43308..60065d3e5 100644 --- a/js/src/key/import.js +++ b/js/src/key/import.js @@ -1,13 +1,8 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const hat = require('hat') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/key/list.js b/js/src/key/list.js index b1a92790b..705576da4 100644 --- a/js/src/key/list.js +++ b/js/src/key/list.js @@ -1,14 +1,9 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const times = require('async/times') const hat = require('hat') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const 
{ getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/key/rm.js b/js/src/key/rm.js index 92f87ce30..502959687 100644 --- a/js/src/key/rm.js +++ b/js/src/key/rm.js @@ -1,13 +1,8 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const hat = require('hat') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/ls/ls-pull-stream.js b/js/src/ls/ls-pull-stream.js index e7b4602e1..48ea076b1 100644 --- a/js/src/ls/ls-pull-stream.js +++ b/js/src/ls/ls-pull-stream.js @@ -1,14 +1,9 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const { fixtures } = require('../files/utils') const pull = require('pull-stream') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/ls/ls-readable-stream.js b/js/src/ls/ls-readable-stream.js index aa5c45f18..2f9268f02 100644 --- a/js/src/ls/ls-readable-stream.js +++ b/js/src/ls/ls-readable-stream.js @@ -1,14 +1,9 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const { fixtures } = require('../files/utils') const concat = require('concat-stream') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git 
a/js/src/ls/ls.js b/js/src/ls/ls.js index fbc3e6425..080c96bf4 100644 --- a/js/src/ls/ls.js +++ b/js/src/ls/ls.js @@ -1,13 +1,8 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const { fixtures } = require('../files/utils') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/miscellaneous/dns.js b/js/src/miscellaneous/dns.js index 19e4db528..f31e8e83e 100644 --- a/js/src/miscellaneous/dns.js +++ b/js/src/miscellaneous/dns.js @@ -1,12 +1,7 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/miscellaneous/id.js b/js/src/miscellaneous/id.js index d9da654fb..2a68ab76f 100644 --- a/js/src/miscellaneous/id.js +++ b/js/src/miscellaneous/id.js @@ -1,12 +1,7 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/miscellaneous/stop.js b/js/src/miscellaneous/stop.js index c550f6bd8..fcc7eb515 100644 --- a/js/src/miscellaneous/stop.js +++ b/js/src/miscellaneous/stop.js @@ -1,12 +1,7 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const { getDescribe, getIt } = 
require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/miscellaneous/version.js b/js/src/miscellaneous/version.js index 8c7320af7..3c8e95e92 100644 --- a/js/src/miscellaneous/version.js +++ b/js/src/miscellaneous/version.js @@ -1,12 +1,7 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/object/data.js b/js/src/object/data.js index d26bb06ae..ddc94ee80 100644 --- a/js/src/object/data.js +++ b/js/src/object/data.js @@ -1,14 +1,9 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const bs58 = require('bs58') const hat = require('hat') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/object/get.js b/js/src/object/get.js index 42ee4a459..8a960aaa9 100644 --- a/js/src/object/get.js +++ b/js/src/object/get.js @@ -1,17 +1,12 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const dagPB = require('ipld-dag-pb') const DAGNode = dagPB.DAGNode const bs58 = require('bs58') const series = require('async/series') const hat = require('hat') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') 
module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/object/links.js b/js/src/object/links.js index 63d891e67..18617b09f 100644 --- a/js/src/object/links.js +++ b/js/src/object/links.js @@ -1,17 +1,12 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const dagPB = require('ipld-dag-pb') const DAGNode = dagPB.DAGNode const bs58 = require('bs58') const series = require('async/series') const hat = require('hat') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/object/new.js b/js/src/object/new.js index 3f0019c30..8be1c790c 100644 --- a/js/src/object/new.js +++ b/js/src/object/new.js @@ -1,12 +1,7 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/object/patch/add-link.js b/js/src/object/patch/add-link.js index 202f7c3bc..e04387d0e 100644 --- a/js/src/object/patch/add-link.js +++ b/js/src/object/patch/add-link.js @@ -1,15 +1,10 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const dagPB = require('ipld-dag-pb') const DAGNode = dagPB.DAGNode const series = require('async/series') -const { getDescribe, getIt } = require('../../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) 
diff --git a/js/src/object/patch/append-data.js b/js/src/object/patch/append-data.js index 9cc3ee3f2..8687f26cb 100644 --- a/js/src/object/patch/append-data.js +++ b/js/src/object/patch/append-data.js @@ -1,12 +1,7 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const { getDescribe, getIt } = require('../../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/object/patch/rm-link.js b/js/src/object/patch/rm-link.js index ad1db0b5f..4b9241b7a 100644 --- a/js/src/object/patch/rm-link.js +++ b/js/src/object/patch/rm-link.js @@ -1,15 +1,10 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const dagPB = require('ipld-dag-pb') const DAGLink = dagPB.DAGLink const series = require('async/series') -const { getDescribe, getIt } = require('../../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/object/patch/set-data.js b/js/src/object/patch/set-data.js index 3d46302a3..9d2cb8187 100644 --- a/js/src/object/patch/set-data.js +++ b/js/src/object/patch/set-data.js @@ -1,12 +1,7 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const { getDescribe, getIt } = require('../../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/object/put.js b/js/src/object/put.js index 3324b37e1..0065008e1 100644 --- a/js/src/object/put.js +++ b/js/src/object/put.js @@ 
-1,16 +1,11 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const dagPB = require('ipld-dag-pb') const DAGNode = dagPB.DAGNode const series = require('async/series') const hat = require('hat') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/object/stat.js b/js/src/object/stat.js index 966bd8112..3ce04f958 100644 --- a/js/src/object/stat.js +++ b/js/src/object/stat.js @@ -1,16 +1,11 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const dagPB = require('ipld-dag-pb') const DAGNode = dagPB.DAGNode const bs58 = require('bs58') const series = require('async/series') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/pin/add.js b/js/src/pin/add.js index 5a1851880..3adc653d7 100644 --- a/js/src/pin/add.js +++ b/js/src/pin/add.js @@ -1,14 +1,9 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const each = require('async/each') const { fixtures } = require('./utils') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/pin/ls.js b/js/src/pin/ls.js index 9251b9f9c..032f60c2d 100644 --- a/js/src/pin/ls.js +++ b/js/src/pin/ls.js @@ -1,14 +1,9 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const 
dirtyChai = require('dirty-chai') const parallel = require('async/parallel') const { fixtures } = require('./utils') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/pin/rm.js b/js/src/pin/rm.js index 55565bb25..80dc3dc80 100644 --- a/js/src/pin/rm.js +++ b/js/src/pin/rm.js @@ -1,14 +1,9 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const parallel = require('async/parallel') const { fixtures } = require('./utils') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/ping/ping-pull-stream.js b/js/src/ping/ping-pull-stream.js index 0fe689dad..ace168142 100644 --- a/js/src/ping/ping-pull-stream.js +++ b/js/src/ping/ping-pull-stream.js @@ -1,18 +1,13 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const pull = require('pull-stream') const series = require('async/series') const { spawnNodesWithId } = require('../utils/spawn') const { waitUntilConnected } = require('../utils/connections') -const { getDescribe, getIt } = require('../utils/mocha') +const { getDescribe, getIt, expect } = require('../utils/mocha') const { expectIsPingResponse, isPong } = require('./utils') -const expect = chai.expect -chai.use(dirtyChai) - module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) diff --git a/js/src/ping/ping-readable-stream.js b/js/src/ping/ping-readable-stream.js index df0a67e83..ecbf7f2bd 100644 --- a/js/src/ping/ping-readable-stream.js +++ 
b/js/src/ping/ping-readable-stream.js @@ -1,19 +1,14 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const pump = require('pump') const { Writable } = require('stream') const series = require('async/series') const { spawnNodesWithId } = require('../utils/spawn') const { waitUntilConnected } = require('../utils/connections') -const { getDescribe, getIt } = require('../utils/mocha') +const { getDescribe, getIt, expect } = require('../utils/mocha') const { expectIsPingResponse, isPong } = require('./utils') -const expect = chai.expect -chai.use(dirtyChai) - module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) diff --git a/js/src/ping/ping.js b/js/src/ping/ping.js index 78d495b94..b68465e37 100644 --- a/js/src/ping/ping.js +++ b/js/src/ping/ping.js @@ -1,17 +1,12 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const series = require('async/series') const { spawnNodesWithId } = require('../utils/spawn') const { waitUntilConnected } = require('../utils/connections') -const { getDescribe, getIt } = require('../utils/mocha') +const { getDescribe, getIt, expect } = require('../utils/mocha') const { expectIsPingResponse, isPong } = require('./utils') -const expect = chai.expect -chai.use(dirtyChai) - module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) diff --git a/js/src/ping/utils.js b/js/src/ping/utils.js index 94fe90f58..8009cada9 100644 --- a/js/src/ping/utils.js +++ b/js/src/ping/utils.js @@ -1,10 +1,6 @@ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') - -const expect = chai.expect -chai.use(dirtyChai) +const { expect } = require('../utils/mocha') function expectIsPingResponse (obj) { expect(obj).to.have.a.property('success') diff --git a/js/src/pubsub/ls.js b/js/src/pubsub/ls.js index 
7659d9bbf..2b7162270 100644 --- a/js/src/pubsub/ls.js +++ b/js/src/pubsub/ls.js @@ -1,14 +1,9 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const each = require('async/each') const { getTopic } = require('./utils') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/pubsub/peers.js b/js/src/pubsub/peers.js index 5f72f18a8..153257859 100644 --- a/js/src/pubsub/peers.js +++ b/js/src/pubsub/peers.js @@ -1,16 +1,11 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const parallel = require('async/parallel') const auto = require('async/auto') const { spawnNodesWithId } = require('../utils/spawn') const { waitForPeers, getTopic } = require('./utils') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/pubsub/publish.js b/js/src/pubsub/publish.js index 0394a01cf..17522d088 100644 --- a/js/src/pubsub/publish.js +++ b/js/src/pubsub/publish.js @@ -1,16 +1,10 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ['error', 8] */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const times = require('async/times') const hat = require('hat') const { getTopic } = require('./utils') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/pubsub/subscribe.js 
b/js/src/pubsub/subscribe.js index 037d4f190..fc726d4ea 100644 --- a/js/src/pubsub/subscribe.js +++ b/js/src/pubsub/subscribe.js @@ -1,18 +1,13 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const series = require('async/series') const parallel = require('async/parallel') const times = require('async/times') const auto = require('async/auto') const { spawnNodesWithId } = require('../utils/spawn') const { waitForPeers, makeCheck, getTopic } = require('./utils') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/pubsub/unsubscribe.js b/js/src/pubsub/unsubscribe.js index cc3f5d793..70fcea30d 100644 --- a/js/src/pubsub/unsubscribe.js +++ b/js/src/pubsub/unsubscribe.js @@ -1,16 +1,10 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ['error', 8] */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const each = require('async/each') const times = require('async/times') const { getTopic } = require('./utils') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/repo/gc.js b/js/src/repo/gc.js index 4abab3944..fb108b2ba 100644 --- a/js/src/repo/gc.js +++ b/js/src/repo/gc.js @@ -1,12 +1,7 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const 
describe = getDescribe(options) diff --git a/js/src/repo/stat.js b/js/src/repo/stat.js index 0a0569f5b..d313a9998 100644 --- a/js/src/repo/stat.js +++ b/js/src/repo/stat.js @@ -1,13 +1,8 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const { expectIsRepo } = require('../stats/utils') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/repo/version.js b/js/src/repo/version.js index 9c04dcf9b..20c012a5b 100644 --- a/js/src/repo/version.js +++ b/js/src/repo/version.js @@ -1,12 +1,7 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/stats/bitswap.js b/js/src/stats/bitswap.js index d1c8ea213..b0e57f38b 100644 --- a/js/src/stats/bitswap.js +++ b/js/src/stats/bitswap.js @@ -1,14 +1,9 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const { getDescribe, getIt } = require('../utils/mocha') +const { getDescribe, getIt, expect } = require('../utils/mocha') const { expectIsBitswap } = require('./utils') -const expect = chai.expect -chai.use(dirtyChai) - module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) diff --git a/js/src/stats/bw-pull-stream.js b/js/src/stats/bw-pull-stream.js index ba810e4da..000525c5f 100644 --- a/js/src/stats/bw-pull-stream.js +++ b/js/src/stats/bw-pull-stream.js @@ -1,14 +1,9 @@ /* eslint-env mocha */ 'use strict' 
-const chai = require('chai') -const dirtyChai = require('dirty-chai') const { expectIsBandwidth } = require('./utils') const pull = require('pull-stream') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/stats/bw-readable-stream.js b/js/src/stats/bw-readable-stream.js index 6f589dd2f..e8bc490b9 100644 --- a/js/src/stats/bw-readable-stream.js +++ b/js/src/stats/bw-readable-stream.js @@ -1,13 +1,8 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const { expectIsBandwidth } = require('./utils') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/stats/bw.js b/js/src/stats/bw.js index 66c726f26..c851f4f27 100644 --- a/js/src/stats/bw.js +++ b/js/src/stats/bw.js @@ -1,13 +1,8 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const { expectIsBandwidth } = require('./utils') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/stats/repo.js b/js/src/stats/repo.js index 80254bdd1..77b85e3e7 100644 --- a/js/src/stats/repo.js +++ b/js/src/stats/repo.js @@ -1,13 +1,8 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const { expectIsRepo } = require('./utils') -const { getDescribe, getIt } = require('../utils/mocha') - -const 
expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/stats/utils.js b/js/src/stats/utils.js index 8ada68103..7d02ce0e5 100644 --- a/js/src/stats/utils.js +++ b/js/src/stats/utils.js @@ -1,6 +1,6 @@ 'use strict' -const { expect } = require('chai') +const { expect } = require('../utils/mocha') const isBigInt = (n) => { return n.constructor.name === 'Big' diff --git a/js/src/swarm/addrs.js b/js/src/swarm/addrs.js index 24c08a292..8e13dac97 100644 --- a/js/src/swarm/addrs.js +++ b/js/src/swarm/addrs.js @@ -1,12 +1,7 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/swarm/connect.js b/js/src/swarm/connect.js index 6c23847bd..e6022ac64 100644 --- a/js/src/swarm/connect.js +++ b/js/src/swarm/connect.js @@ -1,13 +1,8 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const { spawnNodesWithId } = require('../utils/spawn') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/swarm/disconnect.js b/js/src/swarm/disconnect.js index 74742eeb8..4cd3d48c9 100644 --- a/js/src/swarm/disconnect.js +++ b/js/src/swarm/disconnect.js @@ -1,13 +1,8 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const { spawnNodesWithId } = require('../utils/spawn') -const { 
getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/swarm/local-addrs.js b/js/src/swarm/local-addrs.js index ce7988c36..7f5463310 100644 --- a/js/src/swarm/local-addrs.js +++ b/js/src/swarm/local-addrs.js @@ -1,12 +1,7 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/swarm/peers.js b/js/src/swarm/peers.js index 725d28cc4..2308383c6 100644 --- a/js/src/swarm/peers.js +++ b/js/src/swarm/peers.js @@ -1,8 +1,6 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') const auto = require('async/auto') const multiaddr = require('multiaddr') const PeerId = require('peer-id') @@ -10,10 +8,7 @@ const os = require('os') const path = require('path') const hat = require('hat') const { spawnNodesWithId } = require('../utils/spawn') -const { getDescribe, getIt } = require('../utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/types.js b/js/src/types.js index de68421e0..e40bf6b8d 100644 --- a/js/src/types.js +++ b/js/src/types.js @@ -9,12 +9,7 @@ const multiaddr = require('multiaddr') const multibase = require('multibase') const multihash = require('multihashes') const CID = require('cids') -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const { getDescribe, getIt } = 
require('./utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('./utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/util.js b/js/src/util.js index 250384bd8..3a0f32c05 100644 --- a/js/src/util.js +++ b/js/src/util.js @@ -3,12 +3,7 @@ const crypto = require('libp2p-crypto') const isIPFS = require('is-ipfs') -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const { getDescribe, getIt } = require('./utils/mocha') - -const expect = chai.expect -chai.use(dirtyChai) +const { getDescribe, getIt, expect } = require('./utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/js/src/utils/mocha.js b/js/src/utils/mocha.js index 99f633cb0..6bef20624 100644 --- a/js/src/utils/mocha.js +++ b/js/src/utils/mocha.js @@ -1,6 +1,13 @@ /* eslint-env mocha */ 'use strict' +const chai = require('chai') +const dirtyChai = require('dirty-chai') + +chai.use(dirtyChai) + +module.exports.expect = chai.expect + // Get a "describe" function that is optionally 'skipped' or 'onlyed' // If skip/only are boolean true, then we want to skip/only the whole suite function getDescribe (config) { From 0f61c26ce95041dae8da76a47d6cf3a543cbac44 Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Tue, 19 Jun 2018 21:14:54 +0100 Subject: [PATCH 32/41] chore: move invalidArg var into exported function License: MIT Signed-off-by: Alan Shaw --- js/src/bootstrap/rm.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/js/src/bootstrap/rm.js b/js/src/bootstrap/rm.js index 24f6248f8..24a77b6c6 100644 --- a/js/src/bootstrap/rm.js +++ b/js/src/bootstrap/rm.js @@ -3,13 +3,13 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') -const invalidArg = 'this/Is/So/Invalid/' - module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) const 
common = createCommon() + const invalidArg = 'this/Is/So/Invalid/' + describe('.bootstrap.rm', function () { this.timeout(100 * 1000) From eb9e785a6c73b0e492d79a4113f720c61dff418b Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Tue, 19 Jun 2018 21:19:36 +0100 Subject: [PATCH 33/41] chore: prefer arrow functions License: MIT Signed-off-by: Alan Shaw --- js/src/dag/put.js | 2 +- js/src/dag/tree.js | 3 ++- js/src/dht/findpeer.js | 4 ++-- js/src/dht/findprovs.js | 4 ++-- 4 files changed, 7 insertions(+), 6 deletions(-) diff --git a/js/src/dag/put.js b/js/src/dag/put.js index b01621cfe..dadcf6afa 100644 --- a/js/src/dag/put.js +++ b/js/src/dag/put.js @@ -87,7 +87,7 @@ module.exports = (createCommon, options) => { }, done) }) - it('should not put dag-cbor node with wrong multicodec', function (done) { + it('should not put dag-cbor node with wrong multicodec', (done) => { ipfs.dag.put(cborNode, { format: 'dag-pb', hashAlg: 'sha3-512' diff --git a/js/src/dag/tree.js b/js/src/dag/tree.js index d0a6ed4a6..efbf4a92a 100644 --- a/js/src/dag/tree.js +++ b/js/src/dag/tree.js @@ -1,7 +1,8 @@ /* eslint-env mocha */ 'use strict' -const { series, eachSeries } = require('async') +const series = require('async/series') +const eachSeries = require('async/eachSeries') const dagPB = require('ipld-dag-pb') const dagCBOR = require('ipld-dag-cbor') const { spawnNodeWithId } = require('../utils/spawn') diff --git a/js/src/dht/findpeer.js b/js/src/dht/findpeer.js index 817742d66..daae871ac 100644 --- a/js/src/dht/findpeer.js +++ b/js/src/dht/findpeer.js @@ -45,10 +45,10 @@ module.exports = (createCommon, options) => { }) }) - it('should fail to find other peer if peer does not exist', function (done) { + it('should fail to find other peer if peer does not exist', (done) => { nodeA.dht.findpeer('Qmd7qZS4T7xXtsNFdRoK1trfMs5zU94EpokQ9WFtxdPxsZ', (err, peer) => { expect(err).to.not.exist() - expect(peer).to.be.equal(null) + expect(peer).to.not.exist() done() }) }) diff --git 
a/js/src/dht/findprovs.js b/js/src/dht/findprovs.js index b15262254..98f962afd 100644 --- a/js/src/dht/findprovs.js +++ b/js/src/dht/findprovs.js @@ -12,8 +12,6 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.dht.findprovs', function () { - this.timeout(80 * 1000) - let nodeA let nodeB @@ -39,6 +37,8 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) it('should provide from one node and find it through another node', function (done) { + this.timeout(80 * 1000) + waterfall([ (cb) => nodeB.object.new('unixfs-dir', cb), (dagNode, cb) => { From 965bdcb677e455292728dbfa489a75ab09370d8c Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Fri, 22 Jun 2018 09:03:13 +0100 Subject: [PATCH 34/41] test: add test for retrieve empty block Added originally in https://github.com/ipfs/interface-ipfs-core/pull/308 License: MIT Signed-off-by: Alan Shaw --- js/src/block/get.js | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/js/src/block/get.js b/js/src/block/get.js index dbacdcb95..49bac54fd 100644 --- a/js/src/block/get.js +++ b/js/src/block/get.js @@ -54,6 +54,22 @@ module.exports = (createCommon, options) => { }) }) + it('should get an empty block', (done) => { + ipfs.block.put(Buffer.alloc(0), { + format: 'dag-pb', + mhtype: 'sha2-256', + version: 0 + }, (err, block) => { + expect(err).to.not.exist() + + ipfs.block.get(block.cid, (err, block) => { + expect(err).to.not.exist() + expect(block.data).to.eql(Buffer.alloc(0)) + done() + }) + }) + }) + // TODO it.skip('Promises support', (done) => {}) }) } From 2c75978f077abf594fa96bb0b3744224b45986e2 Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Mon, 25 Jun 2018 10:59:19 +0100 Subject: [PATCH 35/41] chore: re-add bitswap tests Originally from PR https://github.com/ipfs/interface-ipfs-core/pull/267 License: MIT Signed-off-by: Alan Shaw --- js/src/bitswap/index.js | 10 +++++ js/src/bitswap/stat.js | 62 +++++++++++++++++++++++++++ 
js/src/bitswap/unwant.js | 76 ++++++++++++++++++++++++++++++++++++++ js/src/bitswap/utils.js | 29 +++++++++++++++ js/src/bitswap/wantlist.js | 74 +++++++++++++++++++++++++++++++++++++ js/src/index.js | 1 + 6 files changed, 252 insertions(+) create mode 100644 js/src/bitswap/index.js create mode 100644 js/src/bitswap/stat.js create mode 100644 js/src/bitswap/unwant.js create mode 100644 js/src/bitswap/utils.js create mode 100644 js/src/bitswap/wantlist.js diff --git a/js/src/bitswap/index.js b/js/src/bitswap/index.js new file mode 100644 index 000000000..3a5f5f373 --- /dev/null +++ b/js/src/bitswap/index.js @@ -0,0 +1,10 @@ +'use strict' +const { createSuite } = require('../utils/suite') + +const tests = { + stat: require('./stat'), + wantlist: require('./wantlist'), + unwant: require('./unwant') +} + +module.exports = createSuite(tests) diff --git a/js/src/bitswap/stat.js b/js/src/bitswap/stat.js new file mode 100644 index 000000000..024612474 --- /dev/null +++ b/js/src/bitswap/stat.js @@ -0,0 +1,62 @@ +/* eslint-env mocha */ +'use strict' + +const waterfall = require('async/waterfall') +const { getDescribe, getIt, expect } = require('../utils/mocha') +const { expectIsBitswap } = require('../stats/utils') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.bitswap.stat', () => { + let ipfs + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + factory.spawnNode((err, node) => { + expect(err).to.not.exist() + ipfs = node + done() + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should get bitswap stats', (done) => { + ipfs.bitswap.stat((err, res) => { + expectIsBitswap(err, res) + done() + }) + }) + + it('should get bitswap stats (promised)', () => { + return 
ipfs.bitswap.stat().then((res) => { + expectIsBitswap(null, res) + }) + }) + + it('should not get bitswap stats when offline', function (done) { + this.timeout(60 * 1000) + + waterfall([ + (cb) => createCommon().setup(cb), + (factory, cb) => factory.spawnNode(cb), + (node, cb) => node.stop((err) => cb(err, node)) + ], (err, node) => { + expect(err).to.not.exist() + node.bitswap.wantlist((err) => { + expect(err).to.exist() + done() + }) + }) + }) + }) +} diff --git a/js/src/bitswap/unwant.js b/js/src/bitswap/unwant.js new file mode 100644 index 000000000..abe029c08 --- /dev/null +++ b/js/src/bitswap/unwant.js @@ -0,0 +1,76 @@ +/* eslint-env mocha */ +'use strict' + +const waterfall = require('async/waterfall') +const { waitUntilConnected } = require('../utils/connections') +const { spawnNodes } = require('../utils/spawn') +const { getDescribe, getIt, expect } = require('../utils/mocha') +const { waitForWantlistKey } = require('./utils') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.bitswap.unwant', () => { + let ipfsA + let ipfsB + const key = 'QmUBdnXXPyoDFXj3Hj39dNJ5VkN3QFRskXxcGaYFBB8CNR' + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + + spawnNodes(2, factory, (err, nodes) => { + expect(err).to.not.exist() + + ipfsA = nodes[0] + ipfsB = nodes[1] + + // Add key to the wantlist for ipfsB + ipfsB.block.get(key, () => {}) + + waitUntilConnected(ipfsA, ipfsB, done) + }) + }) + }) + + after((done) => common.teardown(done)) + + it('should remove a key from the wantlist', (done) => { + waitForWantlistKey(ipfsB, key, (err) => { + expect(err).to.not.exist() + + ipfsB.bitswap.unwant(key, (err) => { + expect(err).to.not.exist() + + ipfsB.bitswap.wantlist((err, list) => { + 
expect(err).to.not.exist() + expect(list.Keys.every(k => k['/'] !== key)).to.equal(true) + done() + }) + }) + }) + }) + + it('should not remove a key from the wantlist when offline', function (done) { + this.timeout(60 * 1000) + + waterfall([ + (cb) => createCommon().setup(cb), + (factory, cb) => factory.spawnNode(cb), + (node, cb) => node.stop((err) => cb(err, node)) + ], (err, node) => { + expect(err).to.not.exist() + node.bitswap.wantlist((err) => { + expect(err).to.exist() + done() + }) + }) + }) + }) +} diff --git a/js/src/bitswap/utils.js b/js/src/bitswap/utils.js new file mode 100644 index 000000000..5a2c9c351 --- /dev/null +++ b/js/src/bitswap/utils.js @@ -0,0 +1,29 @@ +'use strict' + +const until = require('async/until') + +function waitForWantlistKey (ipfs, key, opts, cb) { + if (typeof opts === 'function') { + cb = opts + opts = {} + } + + opts = opts || {} + opts.timeout = opts.timeout || 1000 + + let list = { Keys: [] } + let timedOut = false + + setTimeout(() => { timedOut = true }, opts.timeout) + + const test = () => timedOut ? 
true : list.Keys.every(k => k['/'] === key) + const iteratee = (cb) => ipfs.bitswap.wantlist(opts.peerId, cb) + + until(test, iteratee, (err) => { + if (err) return cb(err) + if (timedOut) return cb(new Error(`Timed out waiting for ${key} in wantlist`)) + cb() + }) +} + +module.exports.waitForWantlistKey = waitForWantlistKey diff --git a/js/src/bitswap/wantlist.js b/js/src/bitswap/wantlist.js new file mode 100644 index 000000000..8718b7b09 --- /dev/null +++ b/js/src/bitswap/wantlist.js @@ -0,0 +1,74 @@ +/* eslint-env mocha */ +'use strict' + +const waterfall = require('async/waterfall') +const { waitUntilConnected } = require('../utils/connections') +const { spawnNodes } = require('../utils/spawn') +const { getDescribe, getIt, expect } = require('../utils/mocha') +const { waitForWantlistKey } = require('./utils') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.bitswap.wantlist', () => { + let ipfsA + let ipfsB + const key = 'QmUBdnXXPyoDFXj3Hj39dNJ5VkN3QFRskXxcGaYFBB8CNR' + + before(function (done) { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + common.setup((err, factory) => { + expect(err).to.not.exist() + + spawnNodes(2, factory, (err, nodes) => { + expect(err).to.not.exist() + + ipfsA = nodes[0] + ipfsB = nodes[1] + + // Add key to the wantlist for ipfsB + ipfsB.block.get(key, () => {}) + + waitUntilConnected(ipfsA, ipfsB, done) + }) + }) + }) + + after(function (done) { + this.timeout(30 * 1000) + common.teardown(done) + }) + + it('should get the wantlist', (done) => { + waitForWantlistKey(ipfsB, key, done) + }) + + it('should get the wantlist by peer ID for a different node', (done) => { + ipfsB.id((err, info) => { + expect(err).to.not.exist() + waitForWantlistKey(ipfsA, key, { peerId: info.id }, done) + }) + }) + + it('should not get the wantlist when 
offline', function (done) { + this.timeout(60 * 1000) + + waterfall([ + (cb) => createCommon().setup(cb), + (factory, cb) => factory.spawnNode(cb), + (node, cb) => node.stop((err) => cb(err, node)) + ], (err, node) => { + expect(err).to.not.exist() + node.bitswap.wantlist((err) => { + expect(err).to.exist() + done() + }) + }) + }) + }) +} diff --git a/js/src/index.js b/js/src/index.js index 87721dafc..9da38f20e 100644 --- a/js/src/index.js +++ b/js/src/index.js @@ -1,5 +1,6 @@ 'use strict' +exports.bitswap = require('./bitswap') exports.block = require('./block') exports.bootstrap = require('./bootstrap') exports.config = require('./config') From 57418b188be918de7a3b6dd1d179be2c4aebcbd8 Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Tue, 26 Jun 2018 20:49:13 +0100 Subject: [PATCH 36/41] chore: do not rely on discovery for ping tests As per original PR https://github.com/ipfs/interface-ipfs-core/pull/311 License: MIT Signed-off-by: Alan Shaw --- js/src/bitswap/unwant.js | 7 +++---- js/src/bitswap/wantlist.js | 7 +++---- js/src/ping/ping-pull-stream.js | 17 ++++++++--------- js/src/ping/ping-readable-stream.js | 17 ++++++++--------- js/src/ping/ping.js | 17 ++++++++--------- 5 files changed, 30 insertions(+), 35 deletions(-) diff --git a/js/src/bitswap/unwant.js b/js/src/bitswap/unwant.js index abe029c08..b16be1d8a 100644 --- a/js/src/bitswap/unwant.js +++ b/js/src/bitswap/unwant.js @@ -2,8 +2,7 @@ 'use strict' const waterfall = require('async/waterfall') -const { waitUntilConnected } = require('../utils/connections') -const { spawnNodes } = require('../utils/spawn') +const { spawnNodesWithId } = require('../utils/spawn') const { getDescribe, getIt, expect } = require('../utils/mocha') const { waitForWantlistKey } = require('./utils') @@ -25,7 +24,7 @@ module.exports = (createCommon, options) => { common.setup((err, factory) => { expect(err).to.not.exist() - spawnNodes(2, factory, (err, nodes) => { + spawnNodesWithId(2, factory, (err, nodes) => { 
expect(err).to.not.exist() ipfsA = nodes[0] @@ -34,7 +33,7 @@ module.exports = (createCommon, options) => { // Add key to the wantlist for ipfsB ipfsB.block.get(key, () => {}) - waitUntilConnected(ipfsA, ipfsB, done) + ipfsA.swarm.connect(ipfsB.peerId.addresses[0], done) }) }) }) diff --git a/js/src/bitswap/wantlist.js b/js/src/bitswap/wantlist.js index 8718b7b09..0606b175a 100644 --- a/js/src/bitswap/wantlist.js +++ b/js/src/bitswap/wantlist.js @@ -2,8 +2,7 @@ 'use strict' const waterfall = require('async/waterfall') -const { waitUntilConnected } = require('../utils/connections') -const { spawnNodes } = require('../utils/spawn') +const { spawnNodesWithId } = require('../utils/spawn') const { getDescribe, getIt, expect } = require('../utils/mocha') const { waitForWantlistKey } = require('./utils') @@ -25,7 +24,7 @@ module.exports = (createCommon, options) => { common.setup((err, factory) => { expect(err).to.not.exist() - spawnNodes(2, factory, (err, nodes) => { + spawnNodesWithId(2, factory, (err, nodes) => { expect(err).to.not.exist() ipfsA = nodes[0] @@ -34,7 +33,7 @@ module.exports = (createCommon, options) => { // Add key to the wantlist for ipfsB ipfsB.block.get(key, () => {}) - waitUntilConnected(ipfsA, ipfsB, done) + ipfsA.swarm.connect(ipfsB.peerId.addresses[0], done) }) }) }) diff --git a/js/src/ping/ping-pull-stream.js b/js/src/ping/ping-pull-stream.js index ace168142..e51c4e95d 100644 --- a/js/src/ping/ping-pull-stream.js +++ b/js/src/ping/ping-pull-stream.js @@ -4,7 +4,6 @@ const pull = require('pull-stream') const series = require('async/series') const { spawnNodesWithId } = require('../utils/spawn') -const { waitUntilConnected } = require('../utils/connections') const { getDescribe, getIt, expect } = require('../utils/mocha') const { expectIsPingResponse, isPong } = require('./utils') @@ -16,8 +15,8 @@ module.exports = (createCommon, options) => { describe('.pingPullStream', function () { this.timeout(15 * 1000) - let ipfsdA - let ipfsdB + let ipfsA + 
let ipfsB before(function (done) { this.timeout(60 * 1000) @@ -29,12 +28,12 @@ module.exports = (createCommon, options) => { (cb) => { spawnNodesWithId(2, factory, (err, nodes) => { if (err) return cb(err) - ipfsdA = nodes[0] - ipfsdB = nodes[1] + ipfsA = nodes[0] + ipfsB = nodes[1] cb() }) }, - (cb) => waitUntilConnected(ipfsdA, ipfsdB, cb) + (cb) => ipfsA.swarm.connect(ipfsB.peerId.addresses[0], cb) ], done) }) }) @@ -45,7 +44,7 @@ module.exports = (createCommon, options) => { let packetNum = 0 const count = 3 pull( - ipfsdA.pingPullStream(ipfsdB.peerId.id, { count }), + ipfsA.pingPullStream(ipfsB.peerId.id, { count }), pull.drain((res) => { expect(res.success).to.be.true() // It's a pong @@ -65,7 +64,7 @@ module.exports = (createCommon, options) => { const unknownPeerId = 'QmUmaEnH1uMmvckMZbh3yShaasvELPW4ZLPWnB4entMTEn' const count = 2 pull( - ipfsdA.pingPullStream(unknownPeerId, { count }), + ipfsA.pingPullStream(unknownPeerId, { count }), pull.drain((res) => { expectIsPingResponse(res) messageNum++ @@ -90,7 +89,7 @@ module.exports = (createCommon, options) => { const invalidPeerId = 'not a peer ID' const count = 2 pull( - ipfsdA.pingPullStream(invalidPeerId, { count }), + ipfsA.pingPullStream(invalidPeerId, { count }), pull.collect((err) => { expect(err).to.exist() expect(err.message).to.include('failed to parse peer address') diff --git a/js/src/ping/ping-readable-stream.js b/js/src/ping/ping-readable-stream.js index ecbf7f2bd..aad13beec 100644 --- a/js/src/ping/ping-readable-stream.js +++ b/js/src/ping/ping-readable-stream.js @@ -5,7 +5,6 @@ const pump = require('pump') const { Writable } = require('stream') const series = require('async/series') const { spawnNodesWithId } = require('../utils/spawn') -const { waitUntilConnected } = require('../utils/connections') const { getDescribe, getIt, expect } = require('../utils/mocha') const { expectIsPingResponse, isPong } = require('./utils') @@ -17,8 +16,8 @@ module.exports = (createCommon, options) => { 
describe('.pingReadableStream', function () { this.timeout(15 * 1000) - let ipfsdA - let ipfsdB + let ipfsA + let ipfsB before(function (done) { this.timeout(60 * 1000) @@ -30,12 +29,12 @@ module.exports = (createCommon, options) => { (cb) => { spawnNodesWithId(2, factory, (err, nodes) => { if (err) return cb(err) - ipfsdA = nodes[0] - ipfsdB = nodes[1] + ipfsA = nodes[0] + ipfsB = nodes[1] cb() }) }, - (cb) => waitUntilConnected(ipfsdA, ipfsdB, cb) + (cb) => ipfsA.swarm.connect(ipfsB.peerId.addresses[0], cb) ], done) }) }) @@ -47,7 +46,7 @@ module.exports = (createCommon, options) => { const count = 3 pump( - ipfsdA.pingReadableStream(ipfsdB.peerId.id, { count }), + ipfsA.pingReadableStream(ipfsB.peerId.id, { count }), new Writable({ objectMode: true, write (res, enc, cb) { @@ -74,7 +73,7 @@ module.exports = (createCommon, options) => { const count = 2 pump( - ipfsdA.pingReadableStream(unknownPeerId, { count }), + ipfsA.pingReadableStream(unknownPeerId, { count }), new Writable({ objectMode: true, write (res, enc, cb) { @@ -106,7 +105,7 @@ module.exports = (createCommon, options) => { const count = 2 pump( - ipfsdA.pingReadableStream(invalidPeerId, { count }), + ipfsA.pingReadableStream(invalidPeerId, { count }), new Writable({ objectMode: true, write: (chunk, enc, cb) => cb() diff --git a/js/src/ping/ping.js b/js/src/ping/ping.js index b68465e37..df58b5a5f 100644 --- a/js/src/ping/ping.js +++ b/js/src/ping/ping.js @@ -3,7 +3,6 @@ const series = require('async/series') const { spawnNodesWithId } = require('../utils/spawn') -const { waitUntilConnected } = require('../utils/connections') const { getDescribe, getIt, expect } = require('../utils/mocha') const { expectIsPingResponse, isPong } = require('./utils') @@ -15,8 +14,8 @@ module.exports = (createCommon, options) => { describe('.ping', function () { this.timeout(15 * 1000) - let ipfsdA - let ipfsdB + let ipfsA + let ipfsB before(function (done) { this.timeout(60 * 1000) @@ -28,12 +27,12 @@ module.exports = 
(createCommon, options) => { (cb) => { spawnNodesWithId(2, factory, (err, nodes) => { if (err) return cb(err) - ipfsdA = nodes[0] - ipfsdB = nodes[1] + ipfsA = nodes[0] + ipfsB = nodes[1] cb() }) }, - (cb) => waitUntilConnected(ipfsdA, ipfsdB, cb) + (cb) => ipfsA.swarm.connect(ipfsB.peerId.addresses[0], cb) ], done) }) }) @@ -42,7 +41,7 @@ module.exports = (createCommon, options) => { it('should send the specified number of packets', (done) => { const count = 3 - ipfsdA.ping(ipfsdB.peerId.id, { count }, (err, responses) => { + ipfsA.ping(ipfsB.peerId.id, { count }, (err, responses) => { expect(err).to.not.exist() responses.forEach(expectIsPingResponse) const pongs = responses.filter(isPong) @@ -55,7 +54,7 @@ module.exports = (createCommon, options) => { const unknownPeerId = 'QmUmaEnH1uMmvckMZbh3yShaasvELPW4ZLPWnB4entMTEn' const count = 2 - ipfsdA.ping(unknownPeerId, { count }, (err, responses) => { + ipfsA.ping(unknownPeerId, { count }, (err, responses) => { expect(err).to.exist() expect(responses[0].text).to.include('Looking up') expect(responses[1].success).to.be.false() @@ -66,7 +65,7 @@ module.exports = (createCommon, options) => { it('should fail when pinging an invalid peer', (done) => { const invalidPeerId = 'not a peer ID' const count = 2 - ipfsdA.ping(invalidPeerId, { count }, (err, responses) => { + ipfsA.ping(invalidPeerId, { count }, (err, responses) => { expect(err).to.exist() expect(err.message).to.include('failed to parse peer address') done() From 22d9b006d071b394001a4b3d9860338afdfa3049 Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Tue, 26 Jun 2018 23:01:29 +0100 Subject: [PATCH 37/41] feat: allow skip reasons to be passed License: MIT Signed-off-by: Alan Shaw --- js/src/utils/mocha.js | 41 ++++++++++++++++++++++++++++++++++------- js/src/utils/suite.js | 8 ++++++-- 2 files changed, 40 insertions(+), 9 deletions(-) diff --git a/js/src/utils/mocha.js b/js/src/utils/mocha.js index 6bef20624..80c51d062 100644 --- a/js/src/utils/mocha.js +++ 
b/js/src/utils/mocha.js @@ -9,24 +9,51 @@ chai.use(dirtyChai) module.exports.expect = chai.expect // Get a "describe" function that is optionally 'skipped' or 'onlyed' -// If skip/only are boolean true, then we want to skip/only the whole suite +// If skip/only are boolean true, or an object with a reason property, then we +// want to skip/only the whole suite function getDescribe (config) { - if (config && config.skip === true) return describe.skip - if (config && config.only === true) return describe.only + if (config) { + if (config.only === true) return describe.only + if (config.skip === true) return describe.skip + + if (typeof config.skip === 'object' && config.skip.reason) { + const _describe = (name, impl) => { + describe.skip(`${name} (${config.skip.reason})`, impl) + } + + _describe.skip = describe.skip + _describe.only = describe.only + + return _describe + } + } + return describe } module.exports.getDescribe = getDescribe // Get an "it" function that is optionally 'skipped' or 'onlyed' -// If skip/only are an array, then we _might_ want to skip/only the specific test +// If skip/only is an array, then we _might_ want to skip/only the specific +// test if one of the items in the array is the same as the test name or if one +// of the items in the array is an object with a name property that is the same +// as the test name. function getIt (config) { + if (!config) return it + const _it = (name, impl) => { - if (config && Array.isArray(config.skip)) { - if (config.skip.includes(name)) return it.skip(name, impl) + if (Array.isArray(config.skip)) { + const skip = config.skip + .map((s) => s && typeof s === 'object' ? 
s : { name: s }) + .find((s) => s.name === name) + + if (skip) { + if (skip.reason) name = `${name} (${skip.reason})` + return it.skip(name, impl) + } } - if (config && Array.isArray(config.only)) { + if (Array.isArray(config.only)) { if (config.only.includes(name)) return it.only(name, impl) } diff --git a/js/src/utils/suite.js b/js/src/utils/suite.js index 2ea1707dd..7e2c88c6a 100644 --- a/js/src/utils/suite.js +++ b/js/src/utils/suite.js @@ -7,8 +7,12 @@ function createSuite (tests, parent) { const suiteName = parent ? `${parent}.${t}` : t if (Array.isArray(opts.skip)) { - if (opts.skip.includes(suiteName)) { - opts.skip = true + const skip = opts.skip + .map((s) => s && typeof s === 'object' ? s : { name: s }) + .find((s) => s.name === suiteName) + + if (skip) { + opts.skip = skip } } From a3d3f5cb91e1a9934bbc884e6ae9ebff936f9442 Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Tue, 26 Jun 2018 23:20:44 +0100 Subject: [PATCH 38/41] fix: pin.rm suite name License: MIT Signed-off-by: Alan Shaw --- js/src/pin/rm.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/js/src/pin/rm.js b/js/src/pin/rm.js index 80dc3dc80..99c47312c 100644 --- a/js/src/pin/rm.js +++ b/js/src/pin/rm.js @@ -10,7 +10,7 @@ module.exports = (createCommon, options) => { const it = getIt(options) const common = createCommon() - describe('.pin', function () { + describe('.pin.rm', function () { this.timeout(50 * 1000) let ipfs From 4f9b635adbd329a44053045f9a90f0dc457d6ecf Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Tue, 26 Jun 2018 23:22:22 +0100 Subject: [PATCH 39/41] chore: remove unnecessary describe block License: MIT Signed-off-by: Alan Shaw --- js/src/pubsub/peers.js | 156 ++++++++++++++++++++--------------------- 1 file changed, 77 insertions(+), 79 deletions(-) diff --git a/js/src/pubsub/peers.js b/js/src/pubsub/peers.js index 153257859..7b6576f41 100644 --- a/js/src/pubsub/peers.js +++ b/js/src/pubsub/peers.js @@ -52,99 +52,97 @@ module.exports = (createCommon, options) 
=> { ], done) }) - describe('.peers', () => { - it('should not error when not subscribed to a topic', (done) => { - const topic = getTopic() - ipfs1.pubsub.peers(topic, (err, peers) => { - expect(err).to.not.exist() - // Should be empty() but as mentioned below go-ipfs returns more than it should - // expect(peers).to.be.empty() - - done() - }) + it('should not error when not subscribed to a topic', (done) => { + const topic = getTopic() + ipfs1.pubsub.peers(topic, (err, peers) => { + expect(err).to.not.exist() + // Should be empty() but as mentioned below go-ipfs returns more than it should + // expect(peers).to.be.empty() + + done() }) + }) - it('should not return extra peers', (done) => { - // Currently go-ipfs returns peers that have not been - // subscribed to the topic. Enable when go-ipfs has been fixed - const sub1 = (msg) => {} - const sub2 = (msg) => {} - const sub3 = (msg) => {} - - const topic = getTopic() - const topicOther = topic + 'different topic' - - parallel([ - (cb) => ipfs1.pubsub.subscribe(topic, sub1, cb), - (cb) => ipfs2.pubsub.subscribe(topicOther, sub2, cb), - (cb) => ipfs3.pubsub.subscribe(topicOther, sub3, cb) - ], (err) => { - expect(err).to.not.exist() + it('should not return extra peers', (done) => { + // Currently go-ipfs returns peers that have not been + // subscribed to the topic. 
Enable when go-ipfs has been fixed + const sub1 = (msg) => {} + const sub2 = (msg) => {} + const sub3 = (msg) => {} - ipfs1.pubsub.peers(topic, (err, peers) => { - expect(err).to.not.exist() - expect(peers).to.be.empty() + const topic = getTopic() + const topicOther = topic + 'different topic' - parallel([ - (cb) => ipfs1.pubsub.unsubscribe(topic, sub1, cb), - (cb) => ipfs2.pubsub.unsubscribe(topicOther, sub2, cb), - (cb) => ipfs3.pubsub.unsubscribe(topicOther, sub3, cb) - ], done) - }) - }) - }) + parallel([ + (cb) => ipfs1.pubsub.subscribe(topic, sub1, cb), + (cb) => ipfs2.pubsub.subscribe(topicOther, sub2, cb), + (cb) => ipfs3.pubsub.subscribe(topicOther, sub3, cb) + ], (err) => { + expect(err).to.not.exist() - it('should return peers for a topic - one peer', (done) => { - // Currently go-ipfs returns peers that have not been - // subscribed to the topic. Enable when go-ipfs has been fixed - const sub1 = (msg) => {} - const sub2 = (msg) => {} - const sub3 = (msg) => {} - const topic = getTopic() - - auto({ - sub1: (cb) => ipfs1.pubsub.subscribe(topic, sub1, cb), - sub2: (cb) => ipfs2.pubsub.subscribe(topic, sub2, cb), - sub3: (cb) => ipfs3.pubsub.subscribe(topic, sub3, cb), - peers: ['sub1', 'sub2', 'sub3', (_, cb) => { - waitForPeers(ipfs1, topic, [ipfs2.peerId.id], cb) - }] - }, (err) => { + ipfs1.pubsub.peers(topic, (err, peers) => { expect(err).to.not.exist() + expect(peers).to.be.empty() parallel([ (cb) => ipfs1.pubsub.unsubscribe(topic, sub1, cb), - (cb) => ipfs2.pubsub.unsubscribe(topic, sub2, cb), - (cb) => ipfs3.pubsub.unsubscribe(topic, sub3, cb) + (cb) => ipfs2.pubsub.unsubscribe(topicOther, sub2, cb), + (cb) => ipfs3.pubsub.unsubscribe(topicOther, sub3, cb) ], done) }) }) + }) - it('should return peers for a topic - multiple peers', (done) => { - const sub1 = (msg) => {} - const sub2 = (msg) => {} - const sub3 = (msg) => {} - const topic = getTopic() - - auto({ - sub1: (cb) => ipfs1.pubsub.subscribe(topic, sub1, cb), - sub2: (cb) => 
ipfs2.pubsub.subscribe(topic, sub2, cb), - sub3: (cb) => ipfs3.pubsub.subscribe(topic, sub3, cb), - peers: ['sub1', 'sub2', 'sub3', (_, cb) => { - waitForPeers(ipfs1, topic, [ - ipfs2.peerId.id, - ipfs3.peerId.id - ], cb) - }] - }, (err) => { - expect(err).to.not.exist() + it('should return peers for a topic - one peer', (done) => { + // Currently go-ipfs returns peers that have not been + // subscribed to the topic. Enable when go-ipfs has been fixed + const sub1 = (msg) => {} + const sub2 = (msg) => {} + const sub3 = (msg) => {} + const topic = getTopic() + + auto({ + sub1: (cb) => ipfs1.pubsub.subscribe(topic, sub1, cb), + sub2: (cb) => ipfs2.pubsub.subscribe(topic, sub2, cb), + sub3: (cb) => ipfs3.pubsub.subscribe(topic, sub3, cb), + peers: ['sub1', 'sub2', 'sub3', (_, cb) => { + waitForPeers(ipfs1, topic, [ipfs2.peerId.id], cb) + }] + }, (err) => { + expect(err).to.not.exist() - parallel([ - (cb) => ipfs1.pubsub.unsubscribe(topic, sub1, cb), - (cb) => ipfs2.pubsub.unsubscribe(topic, sub2, cb), - (cb) => ipfs3.pubsub.unsubscribe(topic, sub3, cb) - ], done) - }) + parallel([ + (cb) => ipfs1.pubsub.unsubscribe(topic, sub1, cb), + (cb) => ipfs2.pubsub.unsubscribe(topic, sub2, cb), + (cb) => ipfs3.pubsub.unsubscribe(topic, sub3, cb) + ], done) + }) + }) + + it('should return peers for a topic - multiple peers', (done) => { + const sub1 = (msg) => {} + const sub2 = (msg) => {} + const sub3 = (msg) => {} + const topic = getTopic() + + auto({ + sub1: (cb) => ipfs1.pubsub.subscribe(topic, sub1, cb), + sub2: (cb) => ipfs2.pubsub.subscribe(topic, sub2, cb), + sub3: (cb) => ipfs3.pubsub.subscribe(topic, sub3, cb), + peers: ['sub1', 'sub2', 'sub3', (_, cb) => { + waitForPeers(ipfs1, topic, [ + ipfs2.peerId.id, + ipfs3.peerId.id + ], cb) + }] + }, (err) => { + expect(err).to.not.exist() + + parallel([ + (cb) => ipfs1.pubsub.unsubscribe(topic, sub1, cb), + (cb) => ipfs2.pubsub.unsubscribe(topic, sub2, cb), + (cb) => ipfs3.pubsub.unsubscribe(topic, sub3, cb) + ], done) }) }) 
}) From 7a9c4d05e08ccf3b922869fabec00d0c2ff87cc9 Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Tue, 26 Jun 2018 23:27:11 +0100 Subject: [PATCH 40/41] fix: ensure there is another node to provide to License: MIT Signed-off-by: Alan Shaw --- js/src/dht/provide.js | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/js/src/dht/provide.js b/js/src/dht/provide.js index d26e9b5fc..91c71f8e6 100644 --- a/js/src/dht/provide.js +++ b/js/src/dht/provide.js @@ -2,6 +2,7 @@ 'use strict' const CID = require('cids') +const { spawnNodesWithId } = require('../utils/spawn') const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { @@ -22,10 +23,10 @@ module.exports = (createCommon, options) => { common.setup((err, factory) => { expect(err).to.not.exist() - factory.spawnNode((err, node) => { + spawnNodesWithId(2, factory, (err, nodes) => { expect(err).to.not.exist() - ipfs = node - done() + ipfs = nodes[0] + ipfs.swarm.connect(nodes[1].peerId.addresses[0], done) }) }) }) From 616a3e0b1850b9856f00459959942d6c129889ba Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Wed, 27 Jun 2018 22:11:30 +0100 Subject: [PATCH 41/41] fix: remove old unused bitswap tests License: MIT Signed-off-by: Alan Shaw --- js/src/bitswap.js | 144 ---------------------------------------------- 1 file changed, 144 deletions(-) delete mode 100644 js/src/bitswap.js diff --git a/js/src/bitswap.js b/js/src/bitswap.js deleted file mode 100644 index 46febe6d7..000000000 --- a/js/src/bitswap.js +++ /dev/null @@ -1,144 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const series = require('async/series') -const expect = chai.expect -const statsTests = require('./utils/stats') -const spawn = require('./utils/spawn') -chai.use(dirtyChai) -const CID = require('cids') - -module.exports = (common) => { - describe('.bitswap online', () => { - let ipfsA - let ipfsB - let withGo - let 
ipfsBId - const key = 'QmUBdnXXPyoDFXj3Hj39dNJ5VkN3QFRskXxcGaYFBB8CNR' - - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - series([ - (cb) => spawn.spawnNodeWithId(factory, (err, node) => { - expect(err).to.not.exist() - ipfsA = node - withGo = node.peerId.agentVersion.startsWith('go-ipfs') - cb() - }), - (cb) => spawn.spawnNodeWithId(factory, (err, node) => { - expect(err).to.not.exist() - ipfsB = node - ipfsBId = node.peerId - ipfsB.block.get(new CID(key)) - .then(() => {}) - .catch(() => {}) - ipfsA.swarm.connect(ipfsBId.addresses[0], (err) => { - expect(err).to.not.exist() - setTimeout(cb, 350) - }) - }) - ], done) - }) - }) - - after((done) => common.teardown(done)) - - it('.stat', (done) => { - ipfsB.bitswap.stat((err, stats) => { - expect(err).to.not.exist() - statsTests.expectIsBitswap(err, stats) - done() - }) - }) - - it('.wantlist', (done) => { - ipfsB.bitswap.wantlist((err, list) => { - expect(err).to.not.exist() - expect(list.Keys).to.have.length(1) - expect(list.Keys[0]['/']).to.equal(key) - done() - }) - }) - - it('.wantlist peerid', (done) => { - ipfsA.bitswap.wantlist(ipfsBId.id, (err, list) => { - expect(err).to.not.exist() - expect(list.Keys[0]['/']).to.equal(key) - done() - }) - }) - - it('.unwant', function (done) { - if (withGo) { - this.skip() - } - ipfsB.bitswap.unwant(key, (err) => { - expect(err).to.not.exist() - ipfsB.bitswap.wantlist((err, list) => { - expect(err).to.not.exist() - expect(list.Keys).to.be.empty() - done() - }) - }) - }) - }) - - describe('.bitswap offline', () => { - let ipfs - - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - 
expect(err).to.not.exist() - ipfs = node - ipfs.id((err, id) => { - expect(err).to.not.exist() - ipfs.stop((err) => { - // TODO: go-ipfs returns an error, https://github.com/ipfs/go-ipfs/issues/4078 - if (!id.agentVersion.startsWith('go-ipfs')) { - expect(err).to.not.exist() - } - done() - }) - }) - }) - }) - }) - - it('.stat gives error while offline', (done) => { - ipfs.bitswap.stat((err, stats) => { - expect(err).to.exist() - expect(stats).to.not.exist() - done() - }) - }) - - it('.wantlist gives error if offline', (done) => { - ipfs.bitswap.wantlist((err, list) => { - expect(err).to.exist() - expect(list).to.not.exist() - done() - }) - }) - - it('.unwant gives error if offline', (done) => { - const key = 'QmUBdnXXPyoDFXj3Hj39dNJ5VkN3QFRskXxcGaYFBB8CNR' - ipfs.bitswap.unwant(key, (err) => { - expect(err).to.exist() - done() - }) - }) - }) -}