diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..d0cf281
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,40 @@
+language: node_js
+cache: npm
+stages:
+  - check
+  - test
+  - cov
+
+node_js:
+  - '10'
+
+os:
+  - linux
+  - osx
+  - windows
+
+script: npx nyc -s npm run test:node -- --bail
+after_success: npx nyc report --reporter=text-lcov > coverage.lcov && npx codecov
+
+jobs:
+  include:
+    - stage: check
+      script:
+        - npx aegir commitlint --travis
+        - npx aegir dep-check
+        - npm run lint
+
+    - stage: test
+      name: chrome
+      addons:
+        chrome: stable
+      script: npx aegir test -t browser -t webworker
+
+    - stage: test
+      name: firefox
+      addons:
+        firefox: latest
+      script: npx aegir test -t browser -t webworker -- --browsers FirefoxHeadless
+
+notifications:
+  email: false
diff --git a/ci/Jenkinsfile b/ci/Jenkinsfile
deleted file mode 100644
index a7da2e5..0000000
--- a/ci/Jenkinsfile
+++ /dev/null
@@ -1,2 +0,0 @@
-// Warning: This file is automatically synced from https://github.com/ipfs/ci-sync so if you want to change it, please change it there and ask someone to sync all repositories.
-javascript()
diff --git a/package.json b/package.json
index ae8bb05..387b1d9 100644
--- a/package.json
+++ b/package.json
@@ -2,19 +2,20 @@
   "name": "datastore-fs",
   "version": "0.8.0",
   "description": "Datastore implementation with file system backend",
-  "leadMaintainer": "Pedro Teixeira ",
+  "leadMaintainer": "Alex Potsides ",
   "main": "src/index.js",
   "scripts": {
+    "test": "aegir test",
+    "test:node": "aegir test -t node",
+    "test:browser": "aegir test -t browser",
+    "test:webworker": "aegir test -t webworker",
+    "build": "aegir build",
     "lint": "aegir lint",
-    "test": "aegir test --target node",
-    "test:node": "aegir test --target node",
-    "flow": "flow",
-    "release": "aegir release --target node --docs",
-    "release-minor": "aegir release --type minor --target node --docs",
-    "release-major": "aegir release --type major --target node --docs",
-    "coverage": "aegir coverage --timeout 10000",
-    "coverage-publish": "aegir coverage --provider codecov --timeout 10000",
-    "docs": "aegir docs"
+    "release": "aegir release",
+    "release-minor": "aegir release --type minor",
+    "release-major": "aegir release --type major",
+    "coverage": "nyc -s npm run test:node && nyc report --reporter=html",
+    "dep-check": "aegir dep-check"
   },
   "repository": {
     "type": "git",
@@ -34,23 +35,22 @@
   },
   "homepage": "https://github.com/ipfs/js-datastore-fs#readme",
   "dependencies": {
-    "async": "^2.6.1",
-    "datastore-core": "~0.6.0",
+    "datastore-core": "~0.7.0",
     "fast-write-atomic": "~0.2.0",
     "glob": "^7.1.3",
-    "graceful-fs": "^4.1.11",
-    "interface-datastore": "~0.6.0",
-    "mkdirp": "~0.5.1",
-    "pull-stream": "^3.6.9"
+    "interface-datastore": "~0.7.0",
+    "mkdirp": "~0.5.1"
   },
   "devDependencies": {
-    "aegir": "^15.3.1",
+    "aegir": "^19.0.3",
+    "async-iterator-all": "^1.0.0",
     "chai": "^4.2.0",
-    "cids": "~0.5.5",
+    "cids": "~0.7.1",
+    "detect-node": "^2.0.4",
     "dirty-chai": "^2.0.1",
-    "flow-bin": "~0.84.0",
+    "flow-bin": "~0.99.0",
     "flow-typed": "^2.5.1",
-    "memdown": "^1.4.1",
+    "memdown": "^4.0.0",
     "rimraf": "^2.6.2"
   },
   "contributors": [
diff --git a/src/index.js b/src/index.js
index d5b1826..21037ff 100644
--- a/src/index.js
+++ b/src/index.js
@@ -3,19 +3,25 @@
 /* :: import type {Batch, Query, QueryResult, Callback} from 'interface-datastore' */
 
-const fs = require('graceful-fs')
-const pull = require('pull-stream')
+const fs = require('fs')
 const glob = require('glob')
-const setImmediate = require('async/setImmediate')
-const waterfall = require('async/series')
-const each = require('async/each')
 const mkdirp = require('mkdirp')
-const writeFile = require('fast-write-atomic')
+const promisify = require('util').promisify
+const writeFile = promisify(require('fast-write-atomic'))
 const path = require('path')
-const asyncFilter = require('interface-datastore').utils.asyncFilter
-const asyncSort = require('interface-datastore').utils.asyncSort
+const filter = require('interface-datastore').utils.filter
+const take = require('interface-datastore').utils.take
+const map = require('interface-datastore').utils.map
+const sortAll = require('interface-datastore').utils.sortAll
 const IDatastore = require('interface-datastore')
+
+const noop = () => {}
+const asyncMkdirp = promisify(require('mkdirp'))
+const fsAccess = promisify(fs.access || noop)
+const fsReadFile = promisify(fs.readFile || noop)
+const fsUnlink = promisify(fs.unlink || noop)
+
 const Key = IDatastore.Key
 const Errors = IDatastore.Errors
 
@@ -57,9 +63,8 @@ class FsDatastore {
     }
   }
 
-  open (callback /* : Callback */) /* : void */ {
+  open () /* : void */ {
     this._openOrCreate()
-    setImmediate(callback)
   }
 
   /**
@@ -69,11 +74,11 @@ class FsDatastore {
    */
   _open () {
     if (!fs.existsSync(this.path)) {
-      throw new Error(`Datastore directory: ${this.path} does not exist`)
+      throw Errors.notFoundError(new Error(`Datastore directory: ${this.path} does not exist`))
     }
 
     if (this.opts.errorIfExists) {
-      throw new Error(`Datastore directory: ${this.path} already exists`)
+      throw Errors.dbOpenFailedError(new Error(`Datastore directory: ${this.path} already exists`))
     }
   }
 
@@ -97,7 +102,7 @@ class FsDatastore {
     try {
       this._open()
     } catch (err) {
-      if (err.message.match('does not exist')) {
+      if (err.code === 'ERR_NOT_FOUND') {
         this._create()
         return
       }
@@ -150,16 +155,13 @@ class FsDatastore {
    *
    * @param {Key} key
    * @param {Buffer} val
-   * @param {function(Error)} callback
-   * @returns {void}
+   * @returns {Promise}
    */
-  putRaw (key /* : Key */, val /* : Buffer */, callback /* : Callback */) /* : void */ {
+  async putRaw (key /* : Key */, val /* : Buffer */) /* : void */ {
     const parts = this._encode(key)
     const file = parts.file.slice(0, -this.opts.extension.length)
-    waterfall([
-      (cb) => mkdirp(parts.dir, { fs: fs }, cb),
-      (cb) => writeFile(file, val, cb)
-    ], (err) => callback(err))
+    await asyncMkdirp(parts.dir, { fs: fs })
+    await writeFile(file, val)
   }
 
   /**
@@ -167,87 +169,87 @@ class FsDatastore {
    *
    * @param {Key} key
    * @param {Buffer} val
-   * @param {function(Error)} callback
-   * @returns {void}
+   * @returns {Promise}
    */
-  put (key /* : Key */, val /* : Buffer */, callback /* : Callback */) /* : void */ {
+  async put (key /* : Key */, val /* : Buffer */) /* : void */ {
     const parts = this._encode(key)
-    waterfall([
-      (cb) => mkdirp(parts.dir, { fs: fs }, cb),
-      (cb) => writeFile(parts.file, val, cb)
-    ], (err) => {
-      if (err) {
-        return callback(Errors.dbWriteFailedError(err))
-      }
-      callback()
-    })
+    try {
+      await asyncMkdirp(parts.dir, { fs: fs })
+      await writeFile(parts.file, val)
+    } catch (err) {
+      throw Errors.dbWriteFailedError(err)
+    }
   }
 
   /**
    * Read from the file system without extension.
    *
    * @param {Key} key
-   * @param {function(Error, Buffer)} callback
-   * @returns {void}
+   * @returns {Promise}
    */
-  getRaw (key /* : Key */, callback /* : Callback */) /* : void */ {
+  async getRaw (key /* : Key */) /* : void */ {
     const parts = this._encode(key)
     let file = parts.file
     file = file.slice(0, -this.opts.extension.length)
-    fs.readFile(file, (err, data) => {
-      if (err) {
-        return callback(Errors.notFoundError(err))
-      }
-      callback(null, data)
-    })
+    let data
+    try {
+      data = await fsReadFile(file)
+    } catch (err) {
+      throw Errors.notFoundError(err)
+    }
+    return data
   }
 
   /**
    * Read from the file system.
    *
    * @param {Key} key
-   * @param {function(Error, Buffer)} callback
-   * @returns {void}
+   * @returns {Promise}
    */
-  get (key /* : Key */, callback /* : Callback */) /* : void */ {
+  async get (key /* : Key */) /* : void */ {
     const parts = this._encode(key)
-    fs.readFile(parts.file, (err, data) => {
-      if (err) {
-        return callback(Errors.notFoundError(err))
-      }
-      callback(null, data)
-    })
+    let data
+    try {
+      data = await fsReadFile(parts.file)
+    } catch (err) {
+      throw Errors.notFoundError(err)
+    }
+    return data
  }
 
   /**
    * Check for the existence of the given key.
    *
    * @param {Key} key
-   * @param {function(Error, bool)} callback
-   * @returns {void}
+   * @returns {Promise}
    */
-  has (key /* : Key */, callback /* : Callback */) /* : void */ {
+  async has (key /* : Key */) /* : void */ {
     const parts = this._encode(key)
-    fs.access(parts.file, err => {
-      callback(null, !err)
-    })
+    try {
+      await fsAccess(parts.file)
+    } catch (err) {
+      return false
+    }
+    return true
   }
 
   /**
    * Delete the record under the given key.
    *
    * @param {Key} key
-   * @param {function(Error)} callback
-   * @returns {void}
+   * @returns {Promise}
    */
-  delete (key /* : Key */, callback /* : Callback */) /* : void */ {
+  async delete (key /* : Key */) /* : void */ {
     const parts = this._encode(key)
-    fs.unlink(parts.file, (err) => {
-      if (err) {
-        return callback(Errors.dbDeleteFailedError(err))
+    try {
+      await fsUnlink(parts.file)
+    } catch (err) {
+      if (err.code === 'ENOENT') {
+        return
       }
-      callback()
-    })
+
+      throw Errors.dbDeleteFailedError(err)
+    }
  }
 
   /**
@@ -265,15 +267,14 @@ class FsDatastore {
       delete (key /* : Key */) /* : void */ {
         deletes.push(key)
       },
-      commit: (callback /* : (err: ?Error) => void */) => {
-        waterfall([
-          (cb) => each(puts, (p, cb) => {
-            this.put(p.key, p.value, cb)
-          }, cb),
-          (cb) => each(deletes, (k, cb) => {
-            this.delete(k, cb)
-          }, cb)
-        ], (err) => callback(err))
+      commit: () /* : Promise */ => {
+        return Promise.all(
+          puts
+            .map((put) => this.put(put.key, put.value))
+            .concat(
+              deletes.map((del) => this.delete(del))
+            )
+        )
       }
     }
   }
 
@@ -282,7 +283,7 @@ class FsDatastore {
    * Query the store.
    *
    * @param {Object} q
-   * @returns {PullStream}
+   * @returns {Iterable}
    */
   query (q /* : Query */) /* : QueryResult */ {
     // glob expects a POSIX path
@@ -291,53 +292,44 @@ class FsDatastore {
       .join(this.path, prefix, '*' + this.opts.extension)
       .split(path.sep)
       .join('/')
-    let tasks = [pull.values(glob.sync(pattern))]
-
+    let files = glob.sync(pattern)
+    let it
     if (!q.keysOnly) {
-      tasks.push(pull.asyncMap((f, cb) => {
-        fs.readFile(f, (err, buf) => {
-          if (err) {
-            return cb(err)
-          }
-          cb(null, {
-            key: this._decode(f),
-            value: buf
-          })
-        })
-      }))
+      it = map(files, async (f) => {
+        const buf = await fsReadFile(f)
+        return {
+          key: this._decode(f),
+          value: buf
+        }
+      })
     } else {
-      tasks.push(pull.map(f => ({ key: this._decode(f) })))
+      it = map(files, f => ({ key: this._decode(f) }))
     }
 
-    if (q.filters != null) {
-      tasks = tasks.concat(q.filters.map(asyncFilter))
+    if (Array.isArray(q.filters)) {
+      it = q.filters.reduce((it, f) => filter(it, f), it)
     }
 
-    if (q.orders != null) {
-      tasks = tasks.concat(q.orders.map(asyncSort))
+    if (Array.isArray(q.orders)) {
+      it = q.orders.reduce((it, f) => sortAll(it, f), it)
     }
 
     if (q.offset != null) {
       let i = 0
-      tasks.push(pull.filter(() => i++ >= q.offset))
+      it = filter(it, () => i++ >= q.offset)
     }
 
     if (q.limit != null) {
-      tasks.push(pull.take(q.limit))
+      it = take(it, q.limit)
     }
 
-    return pull.apply(null, tasks)
+    return it
   }
 
   /**
    * Close the store.
-   *
-   * @param {function(Error)} callback
-   * @returns {void}
    */
-  close (callback /* : (err: ?Error) => void */) /* : void */ {
-    setImmediate(callback)
-  }
+  close () /* : Promise */ { }
 }
 
 module.exports = FsDatastore
diff --git a/test/index.spec.js b/test/index.spec.js
index 9e7e275..54590e7 100644
--- a/test/index.spec.js
+++ b/test/index.spec.js
@@ -5,22 +5,30 @@
 const chai = require('chai')
 chai.use(require('dirty-chai'))
 const expect = chai.expect
-const pull = require('pull-stream')
 const path = require('path')
+const promisify = require('util').promisify
+const noop = () => {}
 const mkdirp = require('mkdirp')
-const rimraf = require('rimraf')
-const waterfall = require('async/waterfall')
-const parallel = require('async/parallel')
+const rimraf = promisify(require('rimraf'))
 const fs = require('fs')
-
+const fsReadFile = promisify(require('fs').readFile || noop)
 const Key = require('interface-datastore').Key
 const utils = require('interface-datastore').utils
 const ShardingStore = require('datastore-core').ShardingDatastore
 const sh = require('datastore-core').shard
+const isNode = require('detect-node')
 
 const FsStore = require('../src')
 
 describe('FsDatastore', () => {
+  if (!isNode) {
+    it('only supports node.js', () => {
+
+    })
+
+    return
+  }
+
   describe('construction', () => {
     it('defaults - folder missing', () => {
       const dir = utils.tmpdir()
@@ -40,7 +48,7 @@
     it('createIfMissing: false - folder missing', () => {
       const dir = utils.tmpdir()
       expect(
-        () => new FsStore(dir, {createIfMissing: false})
+        () => new FsStore(dir, { createIfMissing: false })
       ).to.throw()
     })
 
@@ -48,7 +56,7 @@
       const dir = utils.tmpdir()
       mkdirp.sync(dir)
       expect(
-        () => new FsStore(dir, {errorIfExists: true})
+        () => new FsStore(dir, { errorIfExists: true })
      ).to.throw()
    })
  })

@@ -71,71 +79,83 @@
     )
   })
 
-  it('sharding files', (done) => {
+  it('deleting files', async () => {
+    const dir = utils.tmpdir()
+    const fs = new FsStore(dir)
+    const key = new Key('1234')
+
+    await fs.put(key, Buffer.from([0, 1, 2, 3]))
+    await fs.delete(key)
+
+    try {
+      await fs.get(key)
+      throw new Error('Should have errored')
+    } catch (err) {
+      expect(err.code).to.equal('ERR_NOT_FOUND')
+    }
+  })
+
+  it('deleting non-existent files', async () => {
+    const dir = utils.tmpdir()
+    const fs = new FsStore(dir)
+    const key = new Key('5678')
+
+    await fs.delete(key)
+
+    try {
+      await fs.get(key)
+      throw new Error('Should have errored')
+    } catch (err) {
+      expect(err.code).to.equal('ERR_NOT_FOUND')
+    }
+  })
+
+  it('sharding files', async () => {
     const dir = utils.tmpdir()
     const fstore = new FsStore(dir)
     const shard = new sh.NextToLast(2)
-    waterfall([
-      (cb) => ShardingStore.create(fstore, shard, cb),
-      (cb) => fs.readFile(path.join(dir, sh.SHARDING_FN), cb),
-      (file, cb) => {
-        expect(file.toString()).to.be.eql('/repo/flatfs/shard/v1/next-to-last/2\n')
-        fs.readFile(path.join(dir, sh.README_FN), cb)
-      },
-      (readme, cb) => {
-        expect(readme.toString()).to.be.eql(sh.readme)
-        cb()
-      },
-      (cb) => rimraf(dir, cb)
-    ], done)
+    await ShardingStore.create(fstore, shard)
+
+    const file = await fsReadFile(path.join(dir, sh.SHARDING_FN))
+    expect(file.toString()).to.be.eql('/repo/flatfs/shard/v1/next-to-last/2\n')
+
+    const readme = await fsReadFile(path.join(dir, sh.README_FN))
+    expect(readme.toString()).to.be.eql(sh.readme)
+    await rimraf(dir)
   })
 
-  it('query', (done) => {
+  it('query', async () => {
     const fs = new FsStore(path.join(__dirname, 'test-repo', 'blocks'))
-
-    pull(
-      fs.query({}),
-      pull.collect((err, res) => {
-        expect(err).to.not.exist()
-        expect(res).to.have.length(23)
-        done()
-      })
-    )
+    let res = []
+    for await (const q of fs.query({})) {
+      res.push(q)
+    }
+    expect(res).to.have.length(23)
   })
 
-  it('interop with go', (done) => {
+  it('interop with go', async () => {
     const repodir = path.join(__dirname, '/test-repo/blocks')
     const fstore = new FsStore(repodir)
     const key = new Key('CIQGFTQ7FSI2COUXWWLOQ45VUM2GUZCGAXLWCTOKKPGTUWPXHBNIVOY')
     const expected = fs.readFileSync(path.join(repodir, 'VO', key.toString() + '.data'))
-
-    waterfall([
-      (cb) => ShardingStore.open(fstore, cb),
-      (flatfs, cb) => parallel([
-        (cb) => pull(
-          flatfs.query({}),
-          pull.collect(cb)
-        ),
-        (cb) => flatfs.get(key, cb)
-      ], (err, res) => {
-        expect(err).to.not.exist()
-        expect(res[0]).to.have.length(23)
-        expect(res[1]).to.be.eql(expected)
-
-        cb()
-      })
-    ], done)
+    const flatfs = await ShardingStore.open(fstore)
+    let res = await flatfs.get(key)
+    let queryResult = flatfs.query({})
+    let results = []
+    for await (const result of queryResult) results.push(result)
+    expect(results).to.have.length(23)
+    expect(res).to.be.eql(expected)
   })
 
   describe('interface-datastore', () => {
     const dir = utils.tmpdir()
 
     require('interface-datastore/src/tests')({
-      setup (callback) {
-        callback(null, new FsStore(dir))
+      setup: () => {
+        return new FsStore(dir)
       },
-      teardown (callback) {
-        rimraf(dir, callback)
+      teardown: () => {
+        return rimraf(dir)
       }
     })
   })
@@ -144,12 +164,12 @@
     const dir = utils.tmpdir()
 
     require('interface-datastore/src/tests')({
-      setup (callback) {
+      setup: () => {
         const shard = new sh.NextToLast(2)
-        ShardingStore.createOrOpen(new FsStore(dir), shard, callback)
+        return ShardingStore.createOrOpen(new FsStore(dir), shard)
      },
-      teardown (callback) {
-        rimraf(dir, callback)
+      teardown: () => {
+        return rimraf(dir)
      }
    })
  })
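
Usage sketch, not part of the diff above: after this change every datastore method returns a promise and query() returns an (async) iterable instead of a pull-stream. A minimal example of consuming the new API, assuming only what the diff shows; the store path and key below are hypothetical:

const FsStore = require('datastore-fs')
const { Key } = require('interface-datastore')

async function main () {
  // any writable directory works; it is created on construction by default
  const store = new FsStore('/tmp/example-datastore') // hypothetical path

  const key = new Key('/hello') // hypothetical key
  await store.put(key, Buffer.from('world')) // rejects with a dbWriteFailedError on failure
  const value = await store.get(key) // rejects with code ERR_NOT_FOUND when missing
  console.log(value.toString()) // 'world'

  // query() results are consumed with for await...of, as the updated tests do
  for await (const entry of store.query({ prefix: '/hello' })) {
    console.log(entry.key.toString())
  }

  await store.delete(key) // deleting an absent key is now a no-op (ENOENT is swallowed)
}

main().catch(console.error)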
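
One behavioural change worth flagging in batch(): the old commit() ran all puts and then all deletes as separate serial stages, while the new commit() hands everything to Promise.all, so puts and deletes execute concurrently and a put and a delete of the same key within one batch no longer have a guaranteed order. A sketch of the batch flow (batch() itself comes from the interface-datastore contract rather than this diff; keys are again hypothetical):

const b = store.batch()
b.put(new Key('/a'), Buffer.from('1'))
b.put(new Key('/b'), Buffer.from('2'))
b.delete(new Key('/c'))
await b.commit() // resolves when every operation succeeds, rejects as soon as one fails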