From e1dd24ee79a8c330b4827b78b3b5e6a8671996eb Mon Sep 17 00:00:00 2001
From: Hugo Dias
Date: Wed, 24 Jul 2019 15:02:24 +0100
Subject: [PATCH 1/8] feat: add support for ipns and recursive to ipfs resolve

This PR adds IPNS support to resolve, makes the recursive option true by
default and reworks the tests. JSDocs were added to the resolve methods.
Two interface-core config profile tests needed to be skipped because js-ipfs
doesn't support them yet. Needs: https://github.com/ipfs/interface-js-ipfs-core/pull/504
---
 package.json | 2 +-
 src/cli/commands/resolve.js | 4 +-
 src/core/components/resolve.js | 98 ++++++++++++++++++------------
 src/http/api/resources/resolve.js | 2 +-
 test/cli/resolve.js | 74 -----------------------
 test/http-api/inject/resolve.js | 26 --------
 6 files changed, 62 insertions(+), 144 deletions(-)
 delete mode 100644 test/cli/resolve.js

diff --git a/package.json b/package.json
index 0b87ec4e7b..ad686eba63 100644
--- a/package.json
+++ b/package.json
@@ -75,7 +75,6 @@
     "bs58": "^4.0.1",
     "buffer-peek-stream": "^1.0.1",
     "byteman": "^1.3.5",
-    "callbackify": "^1.1.0",
     "cid-tool": "~0.3.0",
     "cids": "~0.7.1",
     "class-is": "^1.1.0",
@@ -155,6 +154,7 @@
     "peer-id": "~0.12.3",
     "peer-info": "~0.15.0",
     "progress": "^2.0.1",
+    "promise-nodeify": "^3.0.1",
     "promisify-es6": "^1.0.3",
     "protons": "^1.0.1",
     "pull-abortable": "^4.1.1",
diff --git a/src/cli/commands/resolve.js b/src/cli/commands/resolve.js
index beefd749a3..5d2fcfdb21 100644
--- a/src/cli/commands/resolve.js
+++ b/src/cli/commands/resolve.js
@@ -20,11 +20,11 @@ module.exports = {
     }
   },

-  handler ({ getIpfs, print, name, recursive, cidBase, resolve }) {
+  handler ({ getIpfs, name, recursive, cidBase, resolve }) {
     resolve((async () => {
       const ipfs = await getIpfs()
       const res = await ipfs.resolve(name, { recursive, cidBase })
-      print(res)
+      return res
     })())
   }
 }
diff --git a/src/core/components/resolve.js b/src/core/components/resolve.js
index db9039551e..497ba2447a 100644
--- a/src/core/components/resolve.js
+++ b/src/core/components/resolve.js
@@ -1,64 +1,82 @@
 'use strict'

-const promisify = require('promisify-es6')
 const isIpfs = require('is-ipfs')
-const setImmediate = require('async/setImmediate')
 const CID = require('cids')
+const nodeify = require('promise-nodeify')
 const { cidToString } = require('../../utils/cid')

-module.exports = (self) => {
-  return promisify(async (name, opts, cb) => {
-    if (typeof opts === 'function') {
-      cb = opts
-      opts = {}
-    }
+/**
+ * @typedef { import("../index") } IPFS
+ */

-    opts = opts || {}
+/**
+ * @typedef {Object} ResolveOptions
+ * @prop {string} cidBase - Multibase codec name the CID in the resolved path will be encoded with
+ * @prop {boolean} [recursive=true] - Resolve until the result is an IPFS name
+ *
+ */
+/** @typedef {(err: Error, path: string) => void} ResolveCallback */
+
+/**
+ * @callback ResolveWrapper - This wrapper adds support for callbacks and promises
+ * @param {string} name - Path to resolve
+ * @param {ResolveOptions} opts - Options for resolve
+ * @param {ResolveCallback} [cb] - Optional callback function
+ * @returns {Promise | void} - When callback is provided nothing is returned
+ */
+
+/**
+ * IPFS Resolve factory
+ *
+ * @param {IPFS} ipfs
+ * @returns {ResolveWrapper}
+ */
+module.exports = (ipfs) => {
+  /**
+   * IPFS Resolve - Resolve the value of names to IPFS
+   *
+   * @param {String} name
+   * @param {ResolveOptions} [opts={}]
+   * @returns {Promise}
+   */
+  const resolve = async (name, opts = {}) => {
     if (!isIpfs.path(name)) {
-      return setImmediate(() =>
cb(new Error('invalid argument ' + name))) + throw new Error('invalid argument ' + name) } - // TODO remove this and update subsequent code when IPNS is implemented - if (!isIpfs.ipfsPath(name)) { - return setImmediate(() => cb(new Error('resolve non-IPFS names is not implemented'))) + if (isIpfs.ipnsPath(name)) { + name = await ipfs.name.resolve(name, opts) } - const split = name.split('/') // ['', 'ipfs', 'hash', ...path] - const cid = new CID(split[2]) + const [, , hash, ...rest] = name.split('/') // ['', 'ipfs', 'hash', ...path] + const cid = new CID(hash) - if (split.length === 3) { - return setImmediate(() => cb(null, `/ipfs/${cidToString(cid, { base: opts.cidBase })}`)) + // nothing to resolve return the input + if (rest.length === 0) { + return `/ipfs/${cidToString(cid, { base: opts.cidBase })}` } - const path = split.slice(3).join('/') - - const results = self._ipld.resolve(cid, path) + const path = rest.join('/') + const results = ipfs._ipld.resolve(cid, path) let value = cid let remainderPath = path - try { - for await (const result of results) { - if (result.remainderPath === '') { - // Use values from previous iteration if the value isn't a CID - if (CID.isCID(result.value)) { - value = result.value - remainderPath = '' - } - - if (result.value && CID.isCID(result.value.Hash)) { - value = result.value.Hash - remainderPath = '' - } - - break - } + for await (const result of results) { + if (CID.isCID(result.value)) { value = result.value remainderPath = result.remainderPath } - } catch (error) { - return cb(error) } - return cb(null, `/ipfs/${cidToString(value, { base: opts.cidBase })}${remainderPath ? '/' + remainderPath : ''}`) - }) + + return `/ipfs/${cidToString(value, { base: opts.cidBase })}${remainderPath ? '/' + remainderPath : ''}` + } + + return (name, opts = {}, cb) => { + if (typeof opts === 'function') { + cb = opts + opts = {} + } + return nodeify(resolve(name, opts), cb) + } } diff --git a/src/http/api/resources/resolve.js b/src/http/api/resources/resolve.js index e55455cd9b..85a83f5912 100644 --- a/src/http/api/resources/resolve.js +++ b/src/http/api/resources/resolve.js @@ -24,7 +24,7 @@ module.exports = { async handler (request, h) { const { ipfs } = request.server.app const name = request.query.arg - const recursive = request.query.r || request.query.recursive || false + const recursive = request.query.r || request.query.recursive || true const cidBase = request.query['cid-base'] log(name, { recursive, cidBase }) diff --git a/test/cli/resolve.js b/test/cli/resolve.js deleted file mode 100644 index 190eba0c66..0000000000 --- a/test/cli/resolve.js +++ /dev/null @@ -1,74 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const path = require('path') -const expect = require('chai').expect -const isIpfs = require('is-ipfs') -const CID = require('cids') - -const runOnAndOff = require('../utils/on-and-off') - -describe('resolve', () => runOnAndOff((thing) => { - let ipfs - - before(() => { - ipfs = thing.ipfs - }) - - it('should resolve an IPFS hash', () => { - const filePath = path.join(path.resolve(__dirname, '..', '..'), 'src/init-files/init-docs/readme') - let hash - - return ipfs(`add ${filePath}`) - .then((out) => { - hash = out.split(' ')[1] - expect(isIpfs.cid(hash)).to.be.true() - return ipfs(`resolve /ipfs/${hash}`) - }) - .then((out) => { - expect(out).to.contain(`/ipfs/${hash}`) - }) - }) - - it('should resolve an IPFS hash and print CID encoded in specified base', function () { - this.timeout(10 * 1000) - - const filePath = 
path.join(path.resolve(__dirname, '..', '..'), 'src/init-files/init-docs/readme')
-    let b58Hash
-    let b64Hash
-
-    return ipfs(`add ${filePath}`)
-      .then((out) => {
-        b58Hash = out.split(' ')[1]
-        expect(isIpfs.cid(b58Hash)).to.be.true()
-        b64Hash = new CID(b58Hash).toV1().toBaseEncodedString('base64')
-        return ipfs(`resolve /ipfs/${b58Hash} --cid-base=base64`)
-      })
-      .then((out) => {
-        expect(out).to.contain(`/ipfs/${b64Hash}`)
-      })
-  })
-
-  it('should resolve an IPFS path link', function () {
-    this.timeout(10 * 1000)
-
-    const filePath = path.join(path.resolve(__dirname, '..', '..'), 'src/init-files/init-docs/readme')
-    let fileHash, rootHash
-
-    return ipfs(`add ${filePath} --wrap-with-directory`)
-      .then((out) => {
-        const lines = out.split('\n')
-
-        fileHash = lines[0].split(' ')[1]
-        rootHash = lines[1].split(' ')[1]
-
-        expect(isIpfs.cid(fileHash)).to.be.true()
-        expect(isIpfs.cid(rootHash)).to.be.true()
-
-        return ipfs(`resolve /ipfs/${rootHash}/readme`)
-      })
-      .then((out) => {
-        expect(out).to.contain(`/ipfs/${fileHash}`)
-      })
-  })
-}))
diff --git a/test/http-api/inject/resolve.js b/test/http-api/inject/resolve.js
index 21baf6ba71..c228a06607 100644
--- a/test/http-api/inject/resolve.js
+++ b/test/http-api/inject/resolve.js
@@ -1,11 +1,9 @@
 /* eslint-env mocha */
-/* eslint max-nested-callbacks: ["error", 8] */
 'use strict'

 const expect = require('chai').expect
 const FormData = require('form-data')
 const streamToPromise = require('stream-to-promise')
-const multibase = require('multibase')

 module.exports = (http) => {
   describe('resolve', () => {
@@ -15,30 +13,6 @@ module.exports = (http) => {
       api = http.api._httpApi._apiServers[0]
     })

-    it('should resolve a path and return a base2 encoded CID', async () => {
-      const form = new FormData()
-      form.append('data', Buffer.from('TEST' + Date.now()))
-      const headers = form.getHeaders()
-
-      const payload = await streamToPromise(form)
-      let res = await api.inject({
-        method: 'POST',
-        url: '/api/v0/add',
-        headers: headers,
-        payload: payload
-      })
-      expect(res.statusCode).to.equal(200)
-      const hash = JSON.parse(res.result).Hash
-
-      res = await api.inject({
-        method: 'POST',
-        url: `/api/v0/resolve?arg=/ipfs/${hash}&cid-base=base2`
-      })
-
-      expect(res.statusCode).to.equal(200)
-      expect(multibase.isEncoded(res.result.Path.replace('/ipfs/', ''))).to.deep.equal('base2')
-    })
-
     it('should not resolve a path for invalid cid-base option', async () => {
       const form = new FormData()
       form.append('data', Buffer.from('TEST' + Date.now()))

From fb663c0f2b6ca0721647675065c5f52147e26f67 Mon Sep 17 00:00:00 2001
From: Hugo Dias
Date: Mon, 29 Jul 2019 10:57:05 +0100
Subject: [PATCH 2/8] fix: remove the `r` querystring from resolve

the API docs don't have this option, so it's best to remove it and simplify the logic
---
 src/http/api/resources/resolve.js | 10 ++--------
 1 file changed, 2 insertions(+), 8 deletions(-)

diff --git a/src/http/api/resources/resolve.js b/src/http/api/resources/resolve.js
index 85a83f5912..906e0f0e9d 100644
--- a/src/http/api/resources/resolve.js
+++ b/src/http/api/resources/resolve.js
@@ -10,13 +10,7 @@ log.error = debug('ipfs:http-api:resolve:error')
 module.exports = {
   validate: {
     query: Joi.object().keys({
-      r: Joi.alternatives()
-        .when('recursive', {
-          is: Joi.any().exist(),
-          then: Joi.any().forbidden(),
-          otherwise: Joi.boolean()
-        }),
-      recursive: Joi.boolean(),
+      recursive: Joi.boolean().default(true),
       arg: Joi.string().required(),
       'cid-base': Joi.string().valid(multibase.names)
     }).unknown()
@@ -24,7 +18,7 @@ module.exports =
{ async handler (request, h) { const { ipfs } = request.server.app const name = request.query.arg - const recursive = request.query.r || request.query.recursive || true + const recursive = request.query.recursive const cidBase = request.query['cid-base'] log(name, { recursive, cidBase }) From 9cf83861a0d013adf46c60399269e5c43417205d Mon Sep 17 00:00:00 2001 From: Hugo Dias Date: Wed, 21 Aug 2019 11:47:53 +0100 Subject: [PATCH 3/8] feat: nodeify mfs --- src/core/components/files-mfs.js | 201 ++++++++++++++++++++++--------- 1 file changed, 146 insertions(+), 55 deletions(-) diff --git a/src/core/components/files-mfs.js b/src/core/components/files-mfs.js index 94a5ea8594..cbd56fe38c 100644 --- a/src/core/components/files-mfs.js +++ b/src/core/components/files-mfs.js @@ -6,7 +6,7 @@ const toPullStream = require('async-iterator-to-pull-stream') const toReadableStream = require('async-iterator-to-stream') const pullStreamToAsyncIterator = require('pull-stream-to-async-iterator') const all = require('async-iterator-all') -const callbackify = require('callbackify') +const nodeify = require('promise-nodeify') const PassThrough = require('stream').PassThrough const pull = require('pull-stream/pull') const map = require('pull-stream/throughs/map') @@ -26,36 +26,153 @@ const mapLsFile = (options = {}) => { } } -module.exports = self => { - const methods = mfs({ - ipld: self._ipld, - blocks: self._blockService, - datastore: self._repo.root, - repoOwner: self._options.repoOwner - }) - - const withPreload = fn => (...args) => { - const paths = args.filter(arg => isIpfs.ipfsPath(arg) || isIpfs.cid(arg)) +const withPreload = fn => (...args) => { + const paths = args.filter(arg => isIpfs.ipfsPath(arg) || isIpfs.cid(arg)) - if (paths.length) { - const options = args[args.length - 1] - if (options.preload !== false) { - paths.forEach(path => self._preload(path)) - } + if (paths.length) { + const options = args[args.length - 1] + if (options.preload !== false) { + paths.forEach(path => self._preload(path)) } + } + + return fn(...args) +} +module.exports = (/** @type { import("../index") } */ ipfs) => { + const methodsOriginal = mfs({ + ipld: ipfs._ipld, + blocks: ipfs._blockService, + datastore: ipfs._repo.root, + repoOwner: ipfs._options.repoOwner + }) - return fn(...args) + const methods = { + ...methodsOriginal, + cp: withPreload(methodsOriginal.cp), + ls: withPreload(methodsOriginal.ls), + mv: withPreload(methodsOriginal.mv), + read: withPreload(methodsOriginal.read), + stat: withPreload(methodsOriginal.stat) } return { - cp: callbackify.variadic(withPreload(methods.cp)), - flush: callbackify.variadic(methods.flush), - ls: callbackify.variadic(withPreload(async (path, options = {}) => { - const files = await all(methods.ls(path, options)) - - return files.map(mapLsFile(options)) - })), - lsReadableStream: withPreload((path, options = {}) => { + /** + * Copy files + * + * @param {String | Array} from - The path(s) of the source to copy. + * @param {String} to - The path of the destination to copy to. + * @param {Object} [opts] - Options for copy. + * @param {boolean} [opts.parents=false] - Whether or not to make the parent directories if they don't exist. (default: false) + * @param {String} [opts.format=dag-pb] - Format of nodes to write any newly created directories as. (default: dag-pb) + * @param {String} [opts.hashAlg=sha2-256] - Algorithm to use when creating CIDs for newly created directories. 
(default: sha2-256) {@link https://github.com/multiformats/js-multihash/blob/master/src/constants.js#L5-L343 The list of all possible values} + * @param {boolean} [opts.flush=true] - Whether or not to immediately flush MFS changes to disk (default: true). + * @param {function(Error): void} [cb] - Callback function. + * @returns {Promise | void} - When callback is provided nothing is returned. + */ + cp: (from, to, opts, cb) => { + if (typeof opts === 'function') { + cb = opts + opts = {} + } + return nodeify(methods.cp(from, to, opts), cb) + }, + + mkdir: (path, opts, cb) => { + if (typeof opts === 'function') { + cb = opts + opts = {} + } + return nodeify(methods.mkdir(path, opts), cb) + }, + + stat: (path, opts, cb) => { + const stat = async (path, opts = {}) => { + const stats = await methods.stat(path, opts) + + stats.hash = stats.cid.toBaseEncodedString(opts && opts.cidBase) + delete stats.cid + + return stats + } + + if (typeof opts === 'function') { + cb = opts + opts = {} + } + + return nodeify(stat(path, opts), cb) + }, + + rm: (paths, opts, cb) => { + if (typeof opts === 'function') { + cb = opts + opts = {} + } + return nodeify(methods.rm(paths, opts), cb) + }, + + read: (path, opts, cb) => { + const read = async (path, opts = {}) => { + return Buffer.concat(await all(methods.read(path, opts))) + } + + if (typeof opts === 'function') { + cb = opts + opts = {} + } + return nodeify(read(path, opts), cb) + }, + + readPullStream: (path, opts = {}) => toPullStream.source(methods.read(path, opts)), + + readReadableStream: (path, opts = {}) => toReadableStream(methods.read(path, opts)), + + write: (path, content, opts, cb) => { + const write = async (path, content, opts = {}) => { + if (isPullStream.isSource(content)) { + content = pullStreamToAsyncIterator(content) + } + + await methods.write(path, content, opts) + } + if (typeof opts === 'function') { + cb = opts + opts = {} + } + return nodeify(write(path, content, opts), cb) + }, + + mv: (from, to, opts, cb) => { + if (typeof opts === 'function') { + cb = opts + opts = {} + } + return nodeify(methods.mv(from, to, opts), cb) + }, + + flush: (paths, cb) => nodeify(methods.flush(paths), cb), + + ls: (path, opts, cb) => { + const ls = async (path, opts = {}) => { + const files = await all(methods.ls(path, opts)) + + return files.map(mapLsFile(opts)) + } + + if (typeof path === 'function') { + cb = path + path = '/' + opts = {} + } + + if (typeof opts === 'function') { + cb = opts + opts = {} + } + return nodeify(ls(path, opts), cb) + }, + + lsReadableStream: (path, options = {}) => { const stream = toReadableStream.obj(methods.ls(path, options)) const through = new PassThrough({ objectMode: true @@ -75,39 +192,13 @@ module.exports = self => { }) return through - }), - lsPullStream: withPreload((path, options = {}) => { + }, + + lsPullStream: (path, options = {}) => { return pull( toPullStream.source(methods.ls(path, options)), map(mapLsFile(options)) ) - }), - mkdir: callbackify.variadic(methods.mkdir), - mv: callbackify.variadic(withPreload(methods.mv)), - read: callbackify.variadic(withPreload(async (path, options = {}) => { - return Buffer.concat(await all(methods.read(path, options))) - })), - readPullStream: withPreload((path, options = {}) => { - return toPullStream.source(methods.read(path, options)) - }), - readReadableStream: withPreload((path, options = {}) => { - return toReadableStream(methods.read(path, options)) - }), - rm: callbackify.variadic(methods.rm), - stat: callbackify.variadic(withPreload(async (path, options = 
{}) => { - const stats = await methods.stat(path, options) - - stats.hash = cidToString(stats.cid, { base: options.cidBase }) - delete stats.cid - - return stats - })), - write: callbackify.variadic(async (path, content, options = {}) => { - if (isPullStream.isSource(content)) { - content = pullStreamToAsyncIterator(content) - } - - await methods.write(path, content, options) - }) + } } } From ddfd7ada5352d546f22d9a6de14f0b0948623dcd Mon Sep 17 00:00:00 2001 From: Hugo Dias Date: Fri, 30 Aug 2019 11:02:30 +0100 Subject: [PATCH 4/8] fix: change mfs to use promise-nodeify --- package.json | 2 +- src/core/components/files-mfs.js | 216 +++++++++++++++++++++++++++---- 2 files changed, 193 insertions(+), 25 deletions(-) diff --git a/package.json b/package.json index ad686eba63..e7cfec839b 100644 --- a/package.json +++ b/package.json @@ -194,8 +194,8 @@ "execa": "^2.0.4", "form-data": "^2.5.1", "hat": "0.0.3", - "interface-ipfs-core": "^0.111.0", "ipfsd-ctl": "~0.45.0", + "interface-ipfs-core": "^0.111.1", "libp2p-websocket-star": "~0.10.2", "ncp": "^2.0.0", "p-event": "^4.1.0", diff --git a/src/core/components/files-mfs.js b/src/core/components/files-mfs.js index cbd56fe38c..409ae9f6f3 100644 --- a/src/core/components/files-mfs.js +++ b/src/core/components/files-mfs.js @@ -13,6 +13,11 @@ const map = require('pull-stream/throughs/map') const isIpfs = require('is-ipfs') const { cidToString } = require('../../utils/cid') +/** + * @typedef { import("readable-stream").Readable } ReadableStream + * @typedef { import("pull-stream") } PullStream + */ + const mapLsFile = (options = {}) => { const long = options.long || options.l @@ -26,18 +31,6 @@ const mapLsFile = (options = {}) => { } } -const withPreload = fn => (...args) => { - const paths = args.filter(arg => isIpfs.ipfsPath(arg) || isIpfs.cid(arg)) - - if (paths.length) { - const options = args[args.length - 1] - if (options.preload !== false) { - paths.forEach(path => self._preload(path)) - } - } - - return fn(...args) -} module.exports = (/** @type { import("../index") } */ ipfs) => { const methodsOriginal = mfs({ ipld: ipfs._ipld, @@ -46,9 +39,22 @@ module.exports = (/** @type { import("../index") } */ ipfs) => { repoOwner: ipfs._options.repoOwner }) + const withPreload = fn => (...args) => { + const paths = args.filter(arg => isIpfs.ipfsPath(arg) || isIpfs.cid(arg)) + + if (paths.length) { + const options = args[args.length - 1] + if (options.preload !== false) { + paths.forEach(path => ipfs._preload(path)) + } + } + + return fn(...args) + } + const methods = { ...methodsOriginal, - cp: withPreload(methodsOriginal.cp), + // cp: withPreload(methodsOriginal.cp), ls: withPreload(methodsOriginal.ls), mv: withPreload(methodsOriginal.mv), read: withPreload(methodsOriginal.read), @@ -67,7 +73,7 @@ module.exports = (/** @type { import("../index") } */ ipfs) => { * @param {String} [opts.hashAlg=sha2-256] - Algorithm to use when creating CIDs for newly created directories. (default: sha2-256) {@link https://github.com/multiformats/js-multihash/blob/master/src/constants.js#L5-L343 The list of all possible values} * @param {boolean} [opts.flush=true] - Whether or not to immediately flush MFS changes to disk (default: true). * @param {function(Error): void} [cb] - Callback function. - * @returns {Promise | void} - When callback is provided nothing is returned. + * @returns {Promise | void} When callback is provided nothing is returned. 
*/ cp: (from, to, opts, cb) => { if (typeof opts === 'function') { @@ -77,6 +83,18 @@ module.exports = (/** @type { import("../index") } */ ipfs) => { return nodeify(methods.cp(from, to, opts), cb) }, + /** + * Make a directory + * + * @param {String} path - The path to the directory to make. + * @param {Object} [opts] - Options for mkdir. + * @param {boolean} [opts.parents=false] - Value to decide whether or not to make the parent directories if they don't exist. (default: false) + * @param {String} [opts.format=dag-pb] - Format of nodes to write any newly created directories as. (default: dag-pb). + * @param {String} [opts.hashAlg] - Algorithm to use when creating CIDs for newly created directories. (default: sha2-256) {@link https://github.com/multiformats/js-multihash/blob/master/src/constants.js#L5-L343 The list of all possible values} + * @param {boolean} [opts.flush=true] - Whether or not to immediately flush MFS changes to disk (default: true). + * @param {function(Error): void} [cb] - Callback function. + * @returns {Promise | void} When callback is provided nothing is returned. + */ mkdir: (path, opts, cb) => { if (typeof opts === 'function') { cb = opts @@ -85,6 +103,30 @@ module.exports = (/** @type { import("../index") } */ ipfs) => { return nodeify(methods.mkdir(path, opts), cb) }, + /** + * @typedef {Object} StatOutput + * @prop {String} hash - Output hash. + * @prop {number} size - File size in bytes. + * @prop {number} cumulativeSize - Integer with the size of the DAGNodes making up the file in Bytes. + * @prop {string} type - Output type either 'directory' or 'file'. + * @prop {number} blocks - If type is directory, this is the number of files in the directory. If it is file it is the number of blocks that make up the file. + * @prop {boolean} withLocality - Indicate if locality information is present. + * @prop {boolean} local - Indicate if the queried dag is fully present locally. + * @prop {number} sizeLocal - Integer indicating the cumulative size of the data present locally. + */ + + /** + * Get file or directory status. + * + * @param {String} path - Path to the file or directory to stat. + * @param {Object} [opts] - Options for stat. + * @param {boolean} [opts.hash=false] - Return only the hash. (default: false) + * @param {boolean} [opts.size=false] - Return only the size. (default: false) + * @param {boolean} [opts.withLocal=false] - Compute the amount of the dag that is local, and if possible the total size. (default: false) + * @param {String} [opts.cidBase=base58btc] - Which number base to use to format hashes - e.g. base32, base64 etc. (default: base58btc) + * @param {function(Error, StatOutput): void} [cb] - Callback function. + * @returns {Promise | void} When callback is provided nothing is returned. + */ stat: (path, opts, cb) => { const stat = async (path, opts = {}) => { const stats = await methods.stat(path, opts) @@ -103,6 +145,15 @@ module.exports = (/** @type { import("../index") } */ ipfs) => { return nodeify(stat(path, opts), cb) }, + /** + * Remove a file or directory. + * + * @param {String | Array} paths - One or more paths to remove. + * @param {Object} [opts] - Options for remove. + * @param {boolean} [opts.recursive=false] - Whether or not to remove directories recursively. (default: false) + * @param {function(Error): void} [cb] - Callback function. + * @returns {Promise | void} When callback is provided nothing is returned. 
+   */
    rm: (paths, opts, cb) => {
      if (typeof opts === 'function') {
        cb = opts
        opts = {}
      }
      return nodeify(methods.rm(paths, opts), cb)
    },

+   /**
+    * @typedef {Object} ReadOptions
+    * @prop {number} [opts.offset=0] - Integer with the byte offset to begin reading from (default: 0).
+    * @prop {number} [opts.length] - Integer with the maximum number of bytes to read (default: Read to the end of stream).
+    */
+
+   /**
+    * Read a file into a Buffer.
+    *
+    * @param {string} path - Path of the file to read and must point to a file (and not a directory).
+    * @param {ReadOptions} [opts] - Object for read.
+    * @param {function(Error, Buffer): void} [cb] - Callback function.
+    * @returns {Promise | void} When callback is provided nothing is returned.
+    */
    read: (path, opts, cb) => {
      const read = async (path, opts = {}) => {
        return Buffer.concat(await all(methods.read(path, opts)))
      }
@@ -123,10 +188,40 @@ module.exports = (/** @type { import("../index") } */ ipfs) => {
      return nodeify(read(path, opts), cb)
    },

-   readPullStream: (path, opts = {}) => toPullStream.source(methods.read(path, opts)),
-
+   /**
+    * Read a file into a ReadableStream.
+    *
+    * @param {string} path - Path of the file to read and must point to a file (and not a directory).
+    * @param {ReadOptions} [opts] - Object for read.
+    * @returns {ReadableStream} Returns a ReadableStream with the contents of path.
+    */
    readReadableStream: (path, opts = {}) => toReadableStream(methods.read(path, opts)),

+   /**
+    * Read a file into a PullStream.
+    *
+    * @param {string} path - Path of the file to read and must point to a file (and not a directory).
+    * @param {ReadOptions} [opts] - Object for read.
+    * @returns {PullStream} Returns a PullStream with the contents of path.
+    */
+   readPullStream: (path, opts = {}) => toPullStream.source(methods.read(path, opts)),
+
+   /**
+    * Write to a file.
+    *
+    * @param {string} path - Path of the file to write.
+    * @param {Buffer | PullStream | ReadableStream | Blob | string} content - Content to write.
+    * @param {Object} opts - Options for write.
+    * @param {number} [opts.offset=0] - Integer with the byte offset to begin writing at. (default: 0)
+    * @param {boolean} [opts.create=false] - Indicate to create the file if it doesn't exist. (default: false)
+    * @param {boolean} [opts.truncate=false] - Indicate if the file should be truncated after writing all the bytes from content. (default: false)
+    * @param {boolean} [opts.parents=false] - Value to decide whether or not to make the parent directories if they don't exist. (default: false)
+    * @param {number} [opts.length] - Maximum number of bytes to read. (default: Read all bytes from content)
+    * @param {boolean} [opts.rawLeaves=false] - If true, DAG leaves will contain raw file data and not be wrapped in a protobuf. (default: false)
+    * @param {number} [opts.cidVersion=0] - The CID version to use when storing the data (storage keys are based on the CID, including its version). (default: 0)
+    * @param {function(Error): void} [cb] - Callback function.
+    * @returns {Promise | void} When callback is provided nothing is returned.
+    */
    write: (path, content, opts, cb) => {
      const write = async (path, content, opts = {}) => {
        if (isPullStream.isSource(content)) {
          content = pullStreamToAsyncIterator(content)
        }

        await methods.write(path, content, opts)
      }
      if (typeof opts === 'function') {
        cb = opts
        opts = {}
      }
      return nodeify(write(path, content, opts), cb)
    },

+   /**
+    * Move files.
+    *
+    * @param {string | Array} from - Path(s) of the source to move.
+ * @param {string} to - Path of the destination to move to. + * @param {Object} opts - Options for mv. + * @param {boolean} [opts.parents=false] - Value to decide whether or not to make the parent directories if they don't exist. (default: false) + * @param {String} [opts.format=dag-pb] - Format of nodes to write any newly created directories as. (default: dag-pb). + * @param {String} [opts.hashAlg] - Algorithm to use when creating CIDs for newly created directories. (default: sha2-256) {@link https://github.com/multiformats/js-multihash/blob/master/src/constants.js#L5-L343 The list of all possible values} + * @param {boolean} [opts.flush=true] - Value to decide whether or not to immediately flush MFS changes to disk. (default: true) + * @param {function(Error): void} [cb] - Callback function. + * @returns {Promise | void} When callback is provided nothing is returned. + * @description + * If from has multiple values then to must be a directory. + * + * If from has a single value and to exists and is a directory, from will be moved into to. + * + * If from has a single value and to exists and is a file, from must be a file and the contents of to will be replaced with the contents of from otherwise an error will be returned. + * + * If from is an IPFS path, and an MFS path exists with the same name, the IPFS path will be chosen. + * + * All values of from will be removed after the operation is complete unless they are an IPFS path. + */ mv: (from, to, opts, cb) => { if (typeof opts === 'function') { cb = opts @@ -150,8 +268,44 @@ module.exports = (/** @type { import("../index") } */ ipfs) => { return nodeify(methods.mv(from, to, opts), cb) }, - flush: (paths, cb) => nodeify(methods.flush(paths), cb), + /** + * Flush a given path's data to the disk. + * + * @param {string | Array} [paths] - String paths to flush. (default: /) + * @param {function(Error): void} [cb] - Callback function. + * @returns {Promise | void} When callback is provided nothing is returned. + */ + flush: (paths, cb) => { + if (typeof paths === 'function') { + cb = paths + paths = undefined + } + return nodeify(methods.flush(paths), cb) + }, + + /** + * @typedef {Object} ListOutputFile + * @prop {string} name - Which is the file's name. + * @prop {string} type - Which is the object's type (directory or file). + * @prop {number} size - The size of the file in bytes. + * @prop {string} hash - The hash of the file. + */ + + /** + * @typedef {Object} ListOptions + * @prop {boolean} [long=false] - Value to decide whether or not to populate type, size and hash. (default: false) + * @prop {string} [cidBase=base58btc] - Which number base to use to format hashes - e.g. base32, base64 etc. (default: base58btc) + * @prop {boolean} [sort=false] - If true entries will be sorted by filename. (default: false) + */ + /** + * List directories in the local mutable namespace. + * + * @param {string} [path="/"] - String to show listing for. (default: /) + * @param {ListOptions} [opts] - Options for list. + * @param {function(Error, Array): void} [cb] - Callback function. + * @returns {Promise> | void} When callback is provided nothing is returned. 
+ */ ls: (path, opts, cb) => { const ls = async (path, opts = {}) => { const files = await all(methods.ls(path, opts)) @@ -172,20 +326,27 @@ module.exports = (/** @type { import("../index") } */ ipfs) => { return nodeify(ls(path, opts), cb) }, - lsReadableStream: (path, options = {}) => { - const stream = toReadableStream.obj(methods.ls(path, options)) + /** + * Lists a directory from the local mutable namespace that is addressed by a valid IPFS Path. The list will be yielded as Readable Streams. + * + * @param {string} [path="/"] - String to show listing for. (default: /) + * @param {ListOptions} [opts] - Options for list. + * @returns {ReadableStream} It returns a Readable Stream in Object mode that will yield {@link ListOutputFile} + */ + lsReadableStream: (path, opts = {}) => { + const stream = toReadableStream.obj(methods.ls(path, opts)) const through = new PassThrough({ objectMode: true }) stream.on('data', (file) => { - through.write(mapLsFile(options)(file)) + through.write(mapLsFile(opts)(file)) }) stream.on('error', (err) => { through.destroy(err) }) stream.on('end', (file, enc, cb) => { if (file) { - file = mapLsFile(options)(file) + file = mapLsFile(opts)(file) } through.end(file, enc, cb) @@ -194,10 +355,17 @@ module.exports = (/** @type { import("../index") } */ ipfs) => { return through }, - lsPullStream: (path, options = {}) => { + /** + * Lists a directory from the local mutable namespace that is addressed by a valid IPFS Path. The list will be yielded as PullStreams. + * + * @param {string} [path="/"] - String to show listing for. (default: /) + * @param {ListOptions} [opts] - Options for list. + * @returns {PullStream} It returns a PullStream that will yield {@link ListOutputFile} + */ + lsPullStream: (path, opts = {}) => { return pull( - toPullStream.source(methods.ls(path, options)), - map(mapLsFile(options)) + toPullStream.source(methods.ls(path, opts)), + map(mapLsFile(opts)) ) } } From f1018befe4855cec86aeb276a99f88922c9cc3a1 Mon Sep 17 00:00:00 2001 From: Hugo Dias Date: Fri, 30 Aug 2019 18:12:04 +0100 Subject: [PATCH 5/8] chore: uncomment important stuff --- src/core/components/files-mfs.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/core/components/files-mfs.js b/src/core/components/files-mfs.js index 409ae9f6f3..0bdb95890c 100644 --- a/src/core/components/files-mfs.js +++ b/src/core/components/files-mfs.js @@ -54,7 +54,7 @@ module.exports = (/** @type { import("../index") } */ ipfs) => { const methods = { ...methodsOriginal, - // cp: withPreload(methodsOriginal.cp), + cp: withPreload(methodsOriginal.cp), ls: withPreload(methodsOriginal.ls), mv: withPreload(methodsOriginal.mv), read: withPreload(methodsOriginal.read), From 38ba0f33b381cc7a31bbf04585c0ee9839cf5a93 Mon Sep 17 00:00:00 2001 From: Hugo Dias Date: Mon, 2 Sep 2019 17:14:50 +0100 Subject: [PATCH 6/8] fix: skip a test for a not implemented feature --- test/core/interface.spec.js | 7 ++++++- test/http-api/interface.js | 9 ++++++++- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/test/core/interface.spec.js b/test/core/interface.spec.js index f2151a88df..3b3ee46682 100644 --- a/test/core/interface.spec.js +++ b/test/core/interface.spec.js @@ -153,7 +153,12 @@ describe('interface-ipfs-core tests', function () { initOptions: { bits: 512 } } }), { - skip: isNode ? null : { + skip: isNode ? 
[ + { + name: 'should publish message from string', + reason: 'not implemented' + } + ] : { reason: 'FIXME: disabled because no swarm addresses' } }) diff --git a/test/http-api/interface.js b/test/http-api/interface.js index 23b5916624..7f1a459ae9 100644 --- a/test/http-api/interface.js +++ b/test/http-api/interface.js @@ -151,7 +151,14 @@ describe('interface-ipfs-core over ipfs-http-client tests', () => { args: ['--enable-pubsub'], initOptions: { bits: 512 } } - })) + }), { + skip: [ + { + name: 'should publish message from string', + reason: 'not implemented' + } + ] + }) tests.repo(defaultCommonFactory) From ca2d688ead3c0c05db9bc18291402fd7a15604a6 Mon Sep 17 00:00:00 2001 From: Hugo Dias Date: Tue, 3 Sep 2019 18:19:53 +0100 Subject: [PATCH 7/8] chore: unskip test --- test/core/interface.spec.js | 13 +------------ test/http-api/interface.js | 17 +---------------- 2 files changed, 2 insertions(+), 28 deletions(-) diff --git a/test/core/interface.spec.js b/test/core/interface.spec.js index 3b3ee46682..d75d355c6d 100644 --- a/test/core/interface.spec.js +++ b/test/core/interface.spec.js @@ -93,18 +93,7 @@ describe('interface-ipfs-core tests', function () { tests.miscellaneous(CommonFactory.create({ // No need to stop, because the test suite does a 'stop' test. createTeardown: () => cb => cb() - }), { - skip: [ - { - name: 'should resolve an IPNS DNS link', - reason: 'TODO: IPNS resolve not yet implemented https://github.com/ipfs/js-ipfs/issues/1918' - }, - { - name: 'should resolve IPNS link recursively', - reason: 'TODO: IPNS resolve not yet implemented https://github.com/ipfs/js-ipfs/issues/1918' - } - ] - }) + })) tests.name(CommonFactory.create({ spawnOptions: { diff --git a/test/http-api/interface.js b/test/http-api/interface.js index 7f1a459ae9..0523be0170 100644 --- a/test/http-api/interface.js +++ b/test/http-api/interface.js @@ -92,22 +92,7 @@ describe('interface-ipfs-core over ipfs-http-client tests', () => { tests.miscellaneous(CommonFactory.create({ // No need to stop, because the test suite does a 'stop' test. createTeardown: () => cb => cb() - }), { - skip: [ - { - name: 'should resolve an IPNS DNS link', - reason: 'TODO: IPNS resolve not yet implemented https://github.com/ipfs/js-ipfs/issues/1918' - }, - { - name: 'should resolve IPNS link recursively', - reason: 'TODO: IPNS resolve not yet implemented https://github.com/ipfs/js-ipfs/issues/1918' - }, - { - name: 'should recursively resolve ipfs.io', - reason: 'TODO: ipfs.io dnslink=/ipns/website.ipfs.io & IPNS resolve not yet implemented https://github.com/ipfs/js-ipfs/issues/1918' - } - ] - }) + })) tests.name(CommonFactory.create({ spawnOptions: { From 1e40992ea226e8409854056721a9370727038ccf Mon Sep 17 00:00:00 2001 From: Hugo Dias Date: Wed, 4 Sep 2019 12:19:30 +0100 Subject: [PATCH 8/8] chore: fix misc tests --- test/core/interface.spec.js | 5 ++++- test/http-api/interface.js | 5 ++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/test/core/interface.spec.js b/test/core/interface.spec.js index d75d355c6d..4da19bc846 100644 --- a/test/core/interface.spec.js +++ b/test/core/interface.spec.js @@ -92,7 +92,10 @@ describe('interface-ipfs-core tests', function () { tests.miscellaneous(CommonFactory.create({ // No need to stop, because the test suite does a 'stop' test. 
- createTeardown: () => cb => cb() + createTeardown: () => cb => cb(), + spawnOptions: { + args: ['--pass ipfs-is-awesome-software', '--offline'] + } })) tests.name(CommonFactory.create({ diff --git a/test/http-api/interface.js b/test/http-api/interface.js index 0523be0170..f0b2cbc1f8 100644 --- a/test/http-api/interface.js +++ b/test/http-api/interface.js @@ -91,7 +91,10 @@ describe('interface-ipfs-core over ipfs-http-client tests', () => { tests.miscellaneous(CommonFactory.create({ // No need to stop, because the test suite does a 'stop' test. - createTeardown: () => cb => cb() + createTeardown: () => cb => cb(), + spawnOptions: { + args: ['--pass ipfs-is-awesome-software', '--offline'] + } })) tests.name(CommonFactory.create({
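
A minimal usage sketch of the API surface after these patches: promise-first methods that still accept Node-style callbacks via promise-nodeify, with resolve defaulting to recursive: true and also accepting /ipns/ paths. The constructor call (IPFS.create()) and the example file name are assumptions for illustration, not part of the diffs above.

const IPFS = require('ipfs')

async function main () {
  // assumed js-ipfs constructor for this sketch; any running node instance works
  const ipfs = await IPFS.create()

  // MFS methods return promises when no callback is given
  await ipfs.files.write('/hello.txt', Buffer.from('hello world'), { create: true })
  const stats = await ipfs.files.stat('/hello.txt')

  // resolve now defaults to recursive: true and also accepts /ipns/ paths
  const resolved = await ipfs.resolve(`/ipfs/${stats.hash}`)
  console.log(resolved) // => /ipfs/Qm...

  // the same methods still take Node-style callbacks, wrapped by promise-nodeify
  ipfs.files.stat('/hello.txt', (err, s) => {
    if (err) throw err
    console.log(s.hash)
  })
}

main().catch(console.error)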