From ed62755a27b6eac057b58f7576bd9a5691576849 Mon Sep 17 00:00:00 2001 From: David Dias Date: Sat, 3 Jun 2017 15:48:29 +0100 Subject: [PATCH 01/14] chore: update deps --- package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index 156de975..83b79498 100644 --- a/package.json +++ b/package.json @@ -40,9 +40,9 @@ "homepage": "https://github.com/ipfs/js-ipfs-unixfs-engine#readme", "devDependencies": { "aegir": "^11.0.2", - "chai": "^3.5.0", + "chai": "^4.0.1", "dirty-chai": "^1.2.2", - "ipfs": "^0.24.0", + "ipfs": "^0.24.1", "ipfs-block-service": "^0.9.1", "ipfs-repo": "^0.13.1", "ncp": "^2.0.0", From 73ddf53e5c0d18c2784ae1eaa13c69006865e12e Mon Sep 17 00:00:00 2001 From: David Dias Date: Sat, 3 Jun 2017 16:01:50 +0100 Subject: [PATCH 02/14] docs: update readme, fix #168 --- README.md | 73 +++++++++++++++++++++++++++++-------------------------- 1 file changed, 39 insertions(+), 34 deletions(-) diff --git a/README.md b/README.md index b9b747a4..6ec42cb3 100644 --- a/README.md +++ b/README.md @@ -12,7 +12,7 @@ IPFS unixFS Engine ![](https://img.shields.io/badge/npm-%3E%3D3.0.0-orange.svg?style=flat-square) ![](https://img.shields.io/badge/Node.js-%3E%3D4.0.0-orange.svg?style=flat-square) -> JavaScript implementation of the layout and chunking mechanisms used by IPFS +> JavaScript implementation of the layout and chunking mechanisms used by IPFS to handle Files ## Table of Contents @@ -35,9 +35,12 @@ IPFS unixFS Engine ## Usage -### Example Importer +### Importer + +#### Importer example Let's create a little directory to import: + ```sh > cd /tmp > mkdir foo @@ -46,6 +49,7 @@ Let's create a little directory to import: ``` And write the importing logic: + ```js const Importer = require('ipfs-unixfs-engine').Importer const filesAddStream = new Importer(, size: 39243, path: '/tmp/foo/bar' } @@ -93,15 +98,15 @@ When run, the stat of DAG Node is outputted for each file on data event until th ``` -### Importer API +#### Importer API ```js const Importer = require('ipfs-unixfs-engine').Importer ``` -#### const add = new Importer(dag) +#### const import = new Importer(dag [, options]) -The importer is a object Transform stream that accepts objects of the form +The `import` object is a duplex pull stream that takes objects of the form: ```js { @@ -110,50 +115,50 @@ The importer is a object Transform stream that accepts objects of the form } ``` -The stream will output IPFS DAG Node stats for the nodes as they are added to -the DAG Service. When stats on a node are emitted they are guaranteed to have -been written into the [DAG Service][]'s storage mechanism. +`import` will outoyt file info objects as files get stored in IPFS. When stats on a node are emitted they are guaranteed to have been written. -The input's file paths and directory structure will be preserved in the DAG -Nodes. +`dag` is an instance of the [`IPLD Resolver`](https://github.com/ipld/js-ipld-resolver) or the [`js-ipfs` `dag api`](https://github.com/ipfs/interface-ipfs-core/tree/master/API/dag) -### Importer options +The input's file paths and directory structure will be preserved in the [`dag-pb`](https://github.com/ipld/js-ipld-dag-pb) created nodes. -In the second argument of the importer constructor you can specify the following options: +`options` is an JavaScript option that might include the following keys: -* `wrap` (boolean, defaults to false): if true, a wrapping node will be created -* `shardSplitThreshold` (positive integer, defaults to 1000): the number of directory entries above which we decide to use a sharding directory builder (instead of the default flat one) -* `chunker` (string, defaults to `"fixed"`): the chunking strategy. Now only supports `"fixed"` -* `chunkerOptions` (object, optional): the options for the chunker. Defaults to an object with the following properties: - * `maxChunkSize` (positive integer, defaults to `262144`): the maximum chunk size for the `fixed` chunker. -* `strategy` (string, defaults to `"balanced"`): the DAG builder strategy name. Supports: - * `flat`: flat list of chunks - * `balanced`: builds a balanced tree - * `trickle`: builds [a trickle tree](https://github.com/ipfs/specs/pull/57#issuecomment-265205384) -* `maxChildrenPerNode` (positive integer, defaults to `174`): the maximum children per node for the `balanced` and `trickle` DAG builder strategies -* `layerRepeat` (positive integer, defaults to 4): (only applicable to the `trickle` DAG builder strategy). The maximum repetition of parent nodes for each layer of the tree. -* `reduceSingleLeafToSelf` (boolean, defaults to `false`): optimization for, when reducing a set of nodes with one node, reduce it to that node. -* `dirBuilder` (object): the options for the directory builder - * `hamt` (object): the options for the HAMT sharded directory builder - * bits (positive integer, defaults to `5`): the number of bits at each bucket of the HAMT +- `wrap` (boolean, defaults to false): if true, a wrapping node will be created +- `shardSplitThreshold` (positive integer, defaults to 1000): the number of directory entries above which we decide to use a sharding directory builder (instead of the default flat one) +- `chunker` (string, defaults to `"fixed"`): the chunking strategy. Now only supports `"fixed"` +- `chunkerOptions` (object, optional): the options for the chunker. Defaults to an object with the following properties: + - `maxChunkSize` (positive integer, defaults to `262144`): the maximum chunk size for the `fixed` chunker. +- `strategy` (string, defaults to `"balanced"`): the DAG builder strategy name. Supports: + - `flat`: flat list of chunks + - `balanced`: builds a balanced tree + - `trickle`: builds [a trickle tree](https://github.com/ipfs/specs/pull/57#issuecomment-265205384) +- `maxChildrenPerNode` (positive integer, defaults to `174`): the maximum children per node for the `balanced` and `trickle` DAG builder strategies +- `layerRepeat` (positive integer, defaults to 4): (only applicable to the `trickle` DAG builder strategy). The maximum repetition of parent nodes for each layer of the tree. +- `reduceSingleLeafToSelf` (boolean, defaults to `false`): optimization for, when reducing a set of nodes with one node, reduce it to that node. +- `dirBuilder` (object): the options for the directory builder + - `hamt` (object): the options for the HAMT sharded directory builder + - bits (positive integer, defaults to `5`): the number of bits at each bucket of the HAMT -### Example Exporter +### Exporter -``` -// Create an export readable object stream with the hash you want to export and a dag service -const filesStream = Exporter(, ) +#### Exporter example + +```js +// Create an export source pull-stream cid or ipfs path you want to export and a +// to fetch the file from +const filesStream = Exporter(, ) // Pipe the return stream to console filesStream.on('data', (file) => file.content.pipe(process.stdout)) ``` -### Exporter: API +#### Exporter API ```js const Exporter = require('ipfs-unixfs-engine').Exporter ``` -### new Exporter(, ) +### new Exporter(, ) Uses the given [dag API or an ipld-resolver instance][] to fetch an IPFS [UnixFS][] object(s) by their multiaddress. From defaf0a59f576a238711f75c2b0e2ebaac3ff9f7 Mon Sep 17 00:00:00 2001 From: David Dias Date: Sat, 3 Jun 2017 16:54:01 +0100 Subject: [PATCH 03/14] feat: support to export CID --- package.json | 5 +++- src/exporter/index.js | 54 ++++++++++++++++++++++++++++++++++++------- test/test-exporter.js | 21 +++++++++++++++-- 3 files changed, 69 insertions(+), 11 deletions(-) diff --git a/package.json b/package.json index 83b79498..546a0bef 100644 --- a/package.json +++ b/package.json @@ -54,6 +54,7 @@ }, "dependencies": { "async": "^2.4.1", + "bs58": "^4.0.1", "cids": "^0.5.0", "deep-extend": "^0.5.0", "ipfs-unixfs": "^0.1.11", @@ -67,6 +68,7 @@ "pull-batch": "^1.0.0", "pull-block": "^1.2.0", "pull-cat": "^1.1.11", + "pull-defer": "^0.2.2", "pull-pair": "^1.1.0", "pull-paramap": "^1.2.2", "pull-pause": "0.0.1", @@ -74,6 +76,7 @@ "pull-stream": "^3.6.0", "pull-traverse": "^1.0.3", "pull-write": "^1.1.2", + "safe-buffer": "^5.1.0", "sparse-array": "^1.3.1" }, "contributors": [ @@ -88,4 +91,4 @@ "jbenet ", "nginnever " ] -} \ No newline at end of file +} diff --git a/src/exporter/index.js b/src/exporter/index.js index f8f91e07..b0f14ecc 100644 --- a/src/exporter/index.js +++ b/src/exporter/index.js @@ -2,22 +2,60 @@ const pull = require('pull-stream') const CID = require('cids') -const isIPFS = require('is-ipfs') +const v = require('is-ipfs') +const pullDefer = require('pull-defer') const resolve = require('./resolve').resolve -const cleanMultihash = require('./clean-multihash') -module.exports = (hash, ipldResolver) => { - if (!isIPFS.multihash(hash)) { - return pull.error(new Error('not valid multihash')) +function sanitize (path) { + // Buffer -> raw multihash or CID in buffer + if (Buffer.isBuffer(path)) { + return new CID(path).toBaseEncodedString() } - hash = cleanMultihash(hash) + if (CID.isCID(path)) { + return path.toBaseEncodedString() + } + + try { + const cid = new CID(path) + return cid.toBaseEncodedString() + } catch (err) {} // not an isolated CID, can be a path + + if (v.ipfsPath(path)) { + // trim that ipfs prefix + if (path.indexOf('/ipfs/') === 0) { + path = path.substring(6) + } + + return path + } else { + throw new Error('not valid cid or path') + } +} + +module.exports = (path, dag) => { + try { + path = sanitize(path) + } catch (err) { + return pull.error(err) + } + + const d = pullDefer.source() + + const cid = new CID(path) + + dag.get(cid, (err, node) => { + if (err) { + return pull.error(err) + } + d.resolve(pull.values([node])) + }) return pull( - ipldResolver.getStream(new CID(hash)), + d, pull.map((result) => result.value), - pull.map((node) => resolve(node, hash, ipldResolver)), + pull.map((node) => resolve(node, path, dag)), pull.flatten() ) } diff --git a/test/test-exporter.js b/test/test-exporter.js index fa4de22c..036e48df 100644 --- a/test/test-exporter.js +++ b/test/test-exporter.js @@ -12,6 +12,7 @@ const pull = require('pull-stream') const zip = require('pull-zip') const CID = require('cids') const loadFixture = require('aegir/fixtures') +const Buffer = require('safe-buffer').Buffer const unixFSEngine = require('./../src') const exporter = unixFSEngine.exporter @@ -29,7 +30,7 @@ module.exports = (repo) => { it('ensure hash inputs are sanitized', (done) => { const hash = 'QmQmZQxSKQppbsWfVzBvg59Cn3DKtsNVQ94bjAxg2h3Lb8' - const mhBuf = new Buffer(bs58.decode(hash)) + const mhBuf = Buffer.from(bs58.decode(hash)) const cid = new CID(hash) ipldResolver.get(cid, (err, result) => { @@ -85,6 +86,19 @@ module.exports = (repo) => { ) }) + it('export a small file with links using CID instead of multihash', (done) => { + const cid = new CID('QmW7BDxEbGqxxSYVtn3peNPQgdDXbWkoQ6J1EFYAEuQV3Q') + + pull( + exporter(cid, ipldResolver), + pull.collect((err, files) => { + expect(err).to.not.exist() + + fileEql(files[0], bigFile, done) + }) + ) + }) + it('export a large file > 5mb', (done) => { const hash = 'QmRQgufjp9vLE8XK2LGKZSsPCFCF6e4iynCQtNB5X2HBKE' pull( @@ -150,7 +164,10 @@ module.exports = (repo) => { ) }) - it('fails on non existent hash', (done) => { + // TODO: This needs for the stores to have timeouts, + // otherwise it is impossible to predict if a file doesn't + // really exist + it.skip('fails on non existent hash', (done) => { // This hash doesn't exist in the repo const hash = 'QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKj3' From c7d12f61db48a19e6c77db433c49a936c50f8710 Mon Sep 17 00:00:00 2001 From: Pedro Teixeira Date: Tue, 13 Jun 2017 18:03:07 +0100 Subject: [PATCH 04/14] feat: support for fetching subtree --- src/exporter/dir-flat.js | 8 ++- src/exporter/dir-hamt-sharded.js | 27 ++++++---- src/exporter/dir.js | 12 +++-- src/exporter/file.js | 8 ++- src/exporter/index.js | 41 ++++++++------- src/exporter/resolve.js | 5 +- test/node.js | 1 + test/test-dirbuilder-sharding.js | 30 +++++++++++ test/test-export-subtree.js | 87 ++++++++++++++++++++++++++++++++ 9 files changed, 183 insertions(+), 36 deletions(-) create mode 100644 test/test-export-subtree.js diff --git a/src/exporter/dir-flat.js b/src/exporter/dir-flat.js index b19d2dbd..1227f633 100644 --- a/src/exporter/dir-flat.js +++ b/src/exporter/dir-flat.js @@ -9,7 +9,9 @@ const cat = require('pull-cat') // Logic to export a unixfs directory. module.exports = dirExporter -function dirExporter (node, name, ipldResolver, resolve, parent) { +function dirExporter (node, name, pathRest, ipldResolver, resolve, parent) { + const accepts = pathRest.shift() + const dir = { path: name, hash: node.multihash @@ -20,15 +22,17 @@ function dirExporter (node, name, ipldResolver, resolve, parent) { pull( pull.values(node.links), pull.map((link) => ({ + linkName: link.name, path: path.join(name, link.name), hash: link.multihash })), + pull.filter((item) => accepts === undefined || item.linkName === accepts), paramap((item, cb) => ipldResolver.get(new CID(item.hash), (err, n) => { if (err) { return cb(err) } - cb(null, resolve(n.value, item.path, ipldResolver, name, parent)) + cb(null, resolve(n.value, item.path, pathRest, ipldResolver, name, parent)) })), pull.flatten() ) diff --git a/src/exporter/dir-hamt-sharded.js b/src/exporter/dir-hamt-sharded.js index 01171bfb..aa01d769 100644 --- a/src/exporter/dir-hamt-sharded.js +++ b/src/exporter/dir-hamt-sharded.js @@ -10,7 +10,7 @@ const cleanHash = require('./clean-multihash') // Logic to export a unixfs directory. module.exports = shardedDirExporter -function shardedDirExporter (node, name, ipldResolver, resolve, parent) { +function shardedDirExporter (node, name, pathRest, ipldResolver, resolve, parent) { let dir if (!parent || parent.path !== name) { dir = [{ @@ -25,22 +25,31 @@ function shardedDirExporter (node, name, ipldResolver, resolve, parent) { pull.values(node.links), pull.map((link) => { // remove the link prefix (2 chars for the bucket index) - let p = link.name.substring(2) - // another sharded dir or file? - p = p ? path.join(name, p) : name + const p = link.name.substring(2) + const pp = p ? path.join(name, p) : name + let accept = true - return { - name: link.name, - path: p, - hash: link.multihash + if (p && pathRest.length) { + accept = (p === pathRest[0]) + } + if (accept) { + return { + name: p, + path: pp, + hash: link.multihash, + pathRest: p ? pathRest.slice(1) : pathRest + } + } else { + return '' } }), + pull.filter(Boolean), paramap((item, cb) => ipldResolver.get(new CID(item.hash), (err, n) => { if (err) { return cb(err) } - cb(null, resolve(n.value, item.path, ipldResolver, (dir && dir[0]) || parent)) + cb(null, resolve(n.value, item.path, item.pathRest, ipldResolver, (dir && dir[0]) || parent)) })), pull.flatten() ) diff --git a/src/exporter/dir.js b/src/exporter/dir.js index 2ec2fb13..5b5aa930 100644 --- a/src/exporter/dir.js +++ b/src/exporter/dir.js @@ -1,5 +1,7 @@ 'use strict' +// TODO: REMOVE?? + const path = require('path') const pull = require('pull-stream') const paramap = require('pull-paramap') @@ -12,7 +14,7 @@ const switchType = require('../util').switchType // Logic to export a unixfs directory. module.exports = dirExporter -function dirExporter (node, name, ipldResolver) { +function dirExporter (node, name, pathRest, ipldResolver) { // The algorithm below is as follows // // 1. Take all links from a given directory node @@ -24,12 +26,16 @@ function dirExporter (node, name, ipldResolver) { // - `file`: use the fileExporter to load and return the file // 4. Flatten + const accepts = pathRest.shift() + return pull( pull.values(node.links), pull.map((link) => ({ + linkName: link.name, path: path.join(name, link.name), hash: link.multihash })), + pull.filter((item) => accepts === undefined || item.linkName === accepts), paramap((item, cb) => ipldResolver.get(new CID(item.hash), (err, result) => { if (err) { return cb(err) @@ -44,8 +50,8 @@ function dirExporter (node, name, ipldResolver) { cb(null, switchType( node, - () => cat([pull.values([dir]), dirExporter(node, item.path, ipldResolver)]), - () => fileExporter(node, item.path, ipldResolver) + () => cat([pull.values([dir]), dirExporter(node, item.path, pathRest, ipldResolver)]), + () => fileExporter(node, item.path, pathRest, ipldResolver) )) })), pull.flatten() diff --git a/src/exporter/file.js b/src/exporter/file.js index 59a910a4..3329d122 100644 --- a/src/exporter/file.js +++ b/src/exporter/file.js @@ -7,7 +7,7 @@ const pull = require('pull-stream') const paramap = require('pull-paramap') // Logic to export a single (possibly chunked) unixfs file. -module.exports = (node, name, ipldResolver) => { +module.exports = (node, name, pathRest, ipldResolver) => { function getData (node) { try { const file = UnixFS.unmarshal(node.data) @@ -25,6 +25,12 @@ module.exports = (node, name, ipldResolver) => { ) } + const accepts = pathRest.shift() + + if (accepts !== undefined && accepts !== name) { + return pull.empty() + } + let content = pull( traverse.depthFirst(node, visitor), pull.map(getData) diff --git a/src/exporter/index.js b/src/exporter/index.js index b0f14ecc..0ad31006 100644 --- a/src/exporter/index.js +++ b/src/exporter/index.js @@ -7,43 +7,46 @@ const pullDefer = require('pull-defer') const resolve = require('./resolve').resolve -function sanitize (path) { +function pathBaseAndRest (path) { // Buffer -> raw multihash or CID in buffer - if (Buffer.isBuffer(path)) { - return new CID(path).toBaseEncodedString() - } + let pathBase = path + let pathRest = '/' - if (CID.isCID(path)) { - return path.toBaseEncodedString() + if (Buffer.isBuffer(path)) { + pathBase = (new CID(path)).toBaseEncodedString() } - try { - const cid = new CID(path) - return cid.toBaseEncodedString() - } catch (err) {} // not an isolated CID, can be a path - - if (v.ipfsPath(path)) { - // trim that ipfs prefix + if (typeof path === 'string') { if (path.indexOf('/ipfs/') === 0) { path = path.substring(6) } + const subtreeStart = path.indexOf('/') + if (subtreeStart > 0) { + pathBase = path.substring(0, subtreeStart) + pathRest = path.substring(subtreeStart) + } + } else if (CID.isCID(pathBase)) { + pathBase = pathBase.toBaseEncodedString() + } + + pathBase = (new CID(pathBase)).toBaseEncodedString() - return path - } else { - throw new Error('not valid cid or path') + return { + base: pathBase, + rest: pathRest.split('/').filter(Boolean) } } module.exports = (path, dag) => { try { - path = sanitize(path) + path = pathBaseAndRest(path) } catch (err) { return pull.error(err) } const d = pullDefer.source() - const cid = new CID(path) + const cid = new CID(path.base) dag.get(cid, (err, node) => { if (err) { @@ -55,7 +58,7 @@ module.exports = (path, dag) => { return pull( d, pull.map((result) => result.value), - pull.map((node) => resolve(node, path, dag)), + pull.map((node) => resolve(node, path.base, path.rest, dag)), pull.flatten() ) } diff --git a/src/exporter/resolve.js b/src/exporter/resolve.js index 53259a9a..f26aec93 100644 --- a/src/exporter/resolve.js +++ b/src/exporter/resolve.js @@ -14,13 +14,14 @@ module.exports = Object.assign({ typeOf: typeOf }, resolvers) -function resolve (node, name, ipldResolver, parentNode) { +function resolve (node, hash, pathRest, ipldResolver, parentNode) { const type = typeOf(node) const resolver = resolvers[type] if (!resolver) { return pull.error(new Error('Unkown node type ' + type)) } - let stream = resolver(node, name, ipldResolver, resolve, parentNode) + // TODO: pass remaining path to filter output + let stream = resolver(node, hash, pathRest, ipldResolver, resolve, parentNode) return stream } diff --git a/test/node.js b/test/node.js index 7508b997..4a54048b 100644 --- a/test/node.js +++ b/test/node.js @@ -44,6 +44,7 @@ describe('IPFS UnixFS Engine', () => { require('./test-consumable-hash') require('./test-hamt') require('./test-exporter')(repo) + require('./test-export-subtree')(repo) require('./test-importer')(repo) require('./test-importer-flush')(repo) require('./test-import-export')(repo) diff --git a/test/test-dirbuilder-sharding.js b/test/test-dirbuilder-sharding.js index c7ea7741..a8aee269 100644 --- a/test/test-dirbuilder-sharding.js +++ b/test/test-dirbuilder-sharding.js @@ -334,6 +334,36 @@ module.exports = (repo) => { } } }) + + it('exports a big dir with subpath', (done) => { + const exportHash = mh.toB58String(rootHash) + '/big/big/2000' + const entries = {} + pull( + exporter(exportHash, ipldResolver), + pull.collect(collected) + ) + + function collected (err, nodes) { + expect(err).to.not.exist() + const paths = Object.keys(entries).sort() + expect(nodes.length).to.equal(4) + expect(nodes.map((node) => node.path)).to.deep.equal([ + 'QmTm3ZdKxyDLvcEePEvGfB2QReXsiAF7f39yjRcWwWrA6d', + 'QmTm3ZdKxyDLvcEePEvGfB2QReXsiAF7f39yjRcWwWrA6d/big', + 'QmTm3ZdKxyDLvcEePEvGfB2QReXsiAF7f39yjRcWwWrA6d/big/big', + 'QmTm3ZdKxyDLvcEePEvGfB2QReXsiAF7f39yjRcWwWrA6d/big/big/2000' + ]) + pull( + nodes[3].content, + pull.collect((err, content) => { + expect(err).to.not.exist() + expect(content.toString()).to.equal('2000') + done() + }) + ) + } + }) + }) }) } diff --git a/test/test-export-subtree.js b/test/test-export-subtree.js new file mode 100644 index 00000000..e9c31ec0 --- /dev/null +++ b/test/test-export-subtree.js @@ -0,0 +1,87 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +chai.use(require('dirty-chai')) +const expect = chai.expect +const BlockService = require('ipfs-block-service') +const IPLDResolver = require('ipld-resolver') +const UnixFS = require('ipfs-unixfs') +const bs58 = require('bs58') +const pull = require('pull-stream') +const zip = require('pull-zip') +const CID = require('cids') +const loadFixture = require('aegir/fixtures') +const Buffer = require('safe-buffer').Buffer + +const unixFSEngine = require('./../src') +const exporter = unixFSEngine.exporter + +const smallFile = loadFixture(__dirname, 'fixtures/200Bytes.txt') + +module.exports = (repo) => { + describe('exporter', () => { + let ipldResolver + + before(() => { + const bs = new BlockService(repo) + ipldResolver = new IPLDResolver(bs) + }) + + + it('export a file 2 levels down', (done) => { + const hash = 'QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/level-1/200Bytes.txt' + + pull( + exporter(hash, ipldResolver), + pull.collect((err, files) => { + expect(err).to.not.exist() + expect(files.length).to.equal(3) + expect(files[0].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN') + expect(files[0].content).to.not.exist() + expect(files[1].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/level-1') + expect(files[1].content).to.not.exist() + expect(files[2].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/level-1/200Bytes.txt') + fileEql(files[2], smallFile, done) + }) + ) + }) + + it('export a non existing file', (done) => { + const hash = 'QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/doesnotexist' + + pull( + exporter(hash, ipldResolver), + pull.collect((err, files) => { + expect(err).to.not.exist() + expect(files.length).to.equal(1) + expect(files[0].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN') + expect(files[0].content).to.not.exist() + done() + }) + ) + }) + }) +} + +function fileEql (f1, f2, done) { + pull( + f1.content, + pull.collect((err, data) => { + if (err) { + return done(err) + } + + try { + if (f2) { + expect(Buffer.concat(data)).to.eql(f2) + } else { + expect(data).to.exist() + } + } catch (err) { + return done(err) + } + done() + }) + ) +} From 0d405d02463ea3399060b21f885baafa50edf159 Mon Sep 17 00:00:00 2001 From: Pedro Teixeira Date: Tue, 13 Jun 2017 18:12:12 +0100 Subject: [PATCH 05/14] removed unnecessary file --- src/exporter/dir.js | 59 --------------------------------------------- 1 file changed, 59 deletions(-) delete mode 100644 src/exporter/dir.js diff --git a/src/exporter/dir.js b/src/exporter/dir.js deleted file mode 100644 index 5b5aa930..00000000 --- a/src/exporter/dir.js +++ /dev/null @@ -1,59 +0,0 @@ -'use strict' - -// TODO: REMOVE?? - -const path = require('path') -const pull = require('pull-stream') -const paramap = require('pull-paramap') -const CID = require('cids') -const cat = require('pull-cat') - -const fileExporter = require('./file') -const switchType = require('../util').switchType - -// Logic to export a unixfs directory. -module.exports = dirExporter - -function dirExporter (node, name, pathRest, ipldResolver) { - // The algorithm below is as follows - // - // 1. Take all links from a given directory node - // 2. Map each link to their full name (parent + link name) + hash - // 3. Parallel map to - // 3.1. Resolve the hash against the dagService - // 3.2. Switch on the node type - // - `directory`: return node - // - `file`: use the fileExporter to load and return the file - // 4. Flatten - - const accepts = pathRest.shift() - - return pull( - pull.values(node.links), - pull.map((link) => ({ - linkName: link.name, - path: path.join(name, link.name), - hash: link.multihash - })), - pull.filter((item) => accepts === undefined || item.linkName === accepts), - paramap((item, cb) => ipldResolver.get(new CID(item.hash), (err, result) => { - if (err) { - return cb(err) - } - - const dir = { - path: item.path, - size: item.size - } - - const node = result.value - - cb(null, switchType( - node, - () => cat([pull.values([dir]), dirExporter(node, item.path, pathRest, ipldResolver)]), - () => fileExporter(node, item.path, pathRest, ipldResolver) - )) - })), - pull.flatten() - ) -} From cb960a4f07d77e22bf20b0b9f5b8d690fb9f4b05 Mon Sep 17 00:00:00 2001 From: Pedro Teixeira Date: Tue, 13 Jun 2017 18:12:44 +0100 Subject: [PATCH 06/14] removed unused dep --- package.json | 1 - 1 file changed, 1 deletion(-) diff --git a/package.json b/package.json index 546a0bef..5f0bb69a 100644 --- a/package.json +++ b/package.json @@ -60,7 +60,6 @@ "ipfs-unixfs": "^0.1.11", "ipld-dag-pb": "^0.11.0", "ipld-resolver": "^0.11.1", - "is-ipfs": "^0.3.0", "left-pad": "^1.1.3", "lodash": "^4.17.4", "multihashes": "^0.4.5", From 31597dd592f0f160a941beaaa4b622257b3659a2 Mon Sep 17 00:00:00 2001 From: Pedro Teixeira Date: Tue, 13 Jun 2017 18:12:52 +0100 Subject: [PATCH 07/14] passes linter --- src/exporter/index.js | 3 +-- test/test-dirbuilder-sharding.js | 3 --- test/test-export-subtree.js | 7 +------ 3 files changed, 2 insertions(+), 11 deletions(-) diff --git a/src/exporter/index.js b/src/exporter/index.js index 0ad31006..a07aa2f8 100644 --- a/src/exporter/index.js +++ b/src/exporter/index.js @@ -2,7 +2,6 @@ const pull = require('pull-stream') const CID = require('cids') -const v = require('is-ipfs') const pullDefer = require('pull-defer') const resolve = require('./resolve').resolve @@ -16,7 +15,7 @@ function pathBaseAndRest (path) { pathBase = (new CID(path)).toBaseEncodedString() } - if (typeof path === 'string') { + if (typeof path === 'string') { if (path.indexOf('/ipfs/') === 0) { path = path.substring(6) } diff --git a/test/test-dirbuilder-sharding.js b/test/test-dirbuilder-sharding.js index a8aee269..4674eea7 100644 --- a/test/test-dirbuilder-sharding.js +++ b/test/test-dirbuilder-sharding.js @@ -337,7 +337,6 @@ module.exports = (repo) => { it('exports a big dir with subpath', (done) => { const exportHash = mh.toB58String(rootHash) + '/big/big/2000' - const entries = {} pull( exporter(exportHash, ipldResolver), pull.collect(collected) @@ -345,7 +344,6 @@ module.exports = (repo) => { function collected (err, nodes) { expect(err).to.not.exist() - const paths = Object.keys(entries).sort() expect(nodes.length).to.equal(4) expect(nodes.map((node) => node.path)).to.deep.equal([ 'QmTm3ZdKxyDLvcEePEvGfB2QReXsiAF7f39yjRcWwWrA6d', @@ -363,7 +361,6 @@ module.exports = (repo) => { ) } }) - }) }) } diff --git a/test/test-export-subtree.js b/test/test-export-subtree.js index e9c31ec0..1707301e 100644 --- a/test/test-export-subtree.js +++ b/test/test-export-subtree.js @@ -6,12 +6,8 @@ chai.use(require('dirty-chai')) const expect = chai.expect const BlockService = require('ipfs-block-service') const IPLDResolver = require('ipld-resolver') -const UnixFS = require('ipfs-unixfs') -const bs58 = require('bs58') -const pull = require('pull-stream') -const zip = require('pull-zip') -const CID = require('cids') const loadFixture = require('aegir/fixtures') +const pull = require('pull-stream') const Buffer = require('safe-buffer').Buffer const unixFSEngine = require('./../src') @@ -28,7 +24,6 @@ module.exports = (repo) => { ipldResolver = new IPLDResolver(bs) }) - it('export a file 2 levels down', (done) => { const hash = 'QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/level-1/200Bytes.txt' From d3f51c8fdff992d86ea6526d02b3e6c3c7182afa Mon Sep 17 00:00:00 2001 From: Pedro Teixeira Date: Wed, 14 Jun 2017 16:22:16 +0100 Subject: [PATCH 08/14] feat: support for cbor path into file --- src/exporter/resolve.js | 10 ++++++++-- src/exporter/unknown.js | 35 +++++++++++++++++++++++++++++++++++ test/test-export-subtree.js | 24 ++++++++++++++++++++++++ 3 files changed, 67 insertions(+), 2 deletions(-) create mode 100644 src/exporter/unknown.js diff --git a/src/exporter/resolve.js b/src/exporter/resolve.js index f26aec93..9835c5e1 100644 --- a/src/exporter/resolve.js +++ b/src/exporter/resolve.js @@ -6,7 +6,8 @@ const pull = require('pull-stream') const resolvers = { directory: require('./dir-flat'), 'hamt-sharded-directory': require('./dir-hamt-sharded'), - file: require('./file') + file: require('./file'), + unknown: require('./unknown') } module.exports = Object.assign({ @@ -15,7 +16,12 @@ module.exports = Object.assign({ }, resolvers) function resolve (node, hash, pathRest, ipldResolver, parentNode) { - const type = typeOf(node) + let type + try { + type = typeOf(node) + } catch (err) { + type = 'unknown' + } const resolver = resolvers[type] if (!resolver) { return pull.error(new Error('Unkown node type ' + type)) diff --git a/src/exporter/unknown.js b/src/exporter/unknown.js new file mode 100644 index 00000000..f8dcab48 --- /dev/null +++ b/src/exporter/unknown.js @@ -0,0 +1,35 @@ +'use strict' + +const path = require('path') +const CID = require('cids') +const pull = require('pull-stream') +const pullDefer = require('pull-defer') + +// Logic to export a single (possibly chunked) unixfs file. +module.exports = (node, name, pathRest, ipldResolver, resolve) => { + let newNode + if (pathRest.length) { + const pathElem = pathRest.shift() + newNode = node[pathElem] + const newName = path.join(name, pathElem) + if (CID.isCID(newNode)) { + const d = pullDefer.source() + ipldResolver.get(sanitizeCID(newNode), (err, newNode) => { + if (err) { + d.resolve(pull.error(err)) + } else { + d.resolve(resolve(newNode.value, newName, pathRest, ipldResolver, node)) + } + }) + return d + } else if (newNode !== undefined) { + return resolve(newNode, newName, pathRest, ipldResolver, node) + } else { + return pull.error('not found') + } + } +} + +function sanitizeCID (cid) { + return new CID(cid.version, cid.codec, cid.multihash) +} diff --git a/test/test-export-subtree.js b/test/test-export-subtree.js index 1707301e..b1f890e4 100644 --- a/test/test-export-subtree.js +++ b/test/test-export-subtree.js @@ -6,7 +6,9 @@ chai.use(require('dirty-chai')) const expect = chai.expect const BlockService = require('ipfs-block-service') const IPLDResolver = require('ipld-resolver') +const CID = require('cids') const loadFixture = require('aegir/fixtures') + const pull = require('pull-stream') const Buffer = require('safe-buffer').Buffer @@ -56,6 +58,28 @@ module.exports = (repo) => { }) ) }) + + it('exports starting from non-protobuf node', (done) => { + const doc = { a: { file: new CID('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN') } } + ipldResolver.put(doc, { format: 'dag-cbor' }, (err, cid) => { + expect(err).to.not.exist() + const nodeCID = cid.toBaseEncodedString() + + pull( + exporter(nodeCID + '/a/file/level-1/200Bytes.txt', ipldResolver), + pull.collect((err, files) => { + expect(err).to.not.exist() + expect(files.length).to.equal(3) + expect(files[0].path).to.equal('zdpuAzp9okHgbLQdmusXn8cRjr9js6nAM4JrvKDeqp2XEkFzD/a/file') + expect(files[0].content).to.not.exist() + expect(files[1].path).to.equal('zdpuAzp9okHgbLQdmusXn8cRjr9js6nAM4JrvKDeqp2XEkFzD/a/file/level-1') + expect(files[1].content).to.not.exist() + expect(files[2].path).to.equal('zdpuAzp9okHgbLQdmusXn8cRjr9js6nAM4JrvKDeqp2XEkFzD/a/file/level-1/200Bytes.txt') + fileEql(files[2], smallFile, done) + }) + ) + }) + }) }) } From 6bacd46e3d27db0565d4fa888453f9a26a26dc0d Mon Sep 17 00:00:00 2001 From: Pedro Teixeira Date: Wed, 14 Jun 2017 16:48:45 +0100 Subject: [PATCH 09/14] removed left-over comment from copy-pasta --- src/exporter/unknown.js | 1 - 1 file changed, 1 deletion(-) diff --git a/src/exporter/unknown.js b/src/exporter/unknown.js index f8dcab48..274eaa37 100644 --- a/src/exporter/unknown.js +++ b/src/exporter/unknown.js @@ -5,7 +5,6 @@ const CID = require('cids') const pull = require('pull-stream') const pullDefer = require('pull-defer') -// Logic to export a single (possibly chunked) unixfs file. module.exports = (node, name, pathRest, ipldResolver, resolve) => { let newNode if (pathRest.length) { From b446b0d219f902173c0ef389392a7e95c0fa1b59 Mon Sep 17 00:00:00 2001 From: Pedro Teixeira Date: Wed, 14 Jun 2017 16:52:03 +0100 Subject: [PATCH 10/14] detecting invalid node type if no path is given inside non-pb node --- src/exporter/resolve.js | 4 +--- src/exporter/unknown.js | 2 ++ 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/exporter/resolve.js b/src/exporter/resolve.js index 9835c5e1..d8951702 100644 --- a/src/exporter/resolve.js +++ b/src/exporter/resolve.js @@ -26,9 +26,7 @@ function resolve (node, hash, pathRest, ipldResolver, parentNode) { if (!resolver) { return pull.error(new Error('Unkown node type ' + type)) } - // TODO: pass remaining path to filter output - let stream = resolver(node, hash, pathRest, ipldResolver, resolve, parentNode) - return stream + return resolver(node, hash, pathRest, ipldResolver, resolve, parentNode) } function typeOf (node) { diff --git a/src/exporter/unknown.js b/src/exporter/unknown.js index 274eaa37..c58ac0ed 100644 --- a/src/exporter/unknown.js +++ b/src/exporter/unknown.js @@ -26,6 +26,8 @@ module.exports = (node, name, pathRest, ipldResolver, resolve) => { } else { return pull.error('not found') } + } else { + return pull.error(new Error('invalid node type')) } } From c060b53f1c0510179c5644a91ee37db8c087ca21 Mon Sep 17 00:00:00 2001 From: Pedro Teixeira Date: Thu, 15 Jun 2017 08:11:51 +0100 Subject: [PATCH 11/14] exporter: more elegant type checking --- src/exporter/{unknown.js => object.js} | 0 src/exporter/resolve.js | 16 +++++++--------- 2 files changed, 7 insertions(+), 9 deletions(-) rename src/exporter/{unknown.js => object.js} (100%) diff --git a/src/exporter/unknown.js b/src/exporter/object.js similarity index 100% rename from src/exporter/unknown.js rename to src/exporter/object.js diff --git a/src/exporter/resolve.js b/src/exporter/resolve.js index d8951702..71b1067a 100644 --- a/src/exporter/resolve.js +++ b/src/exporter/resolve.js @@ -7,7 +7,7 @@ const resolvers = { directory: require('./dir-flat'), 'hamt-sharded-directory': require('./dir-hamt-sharded'), file: require('./file'), - unknown: require('./unknown') + object: require('./object') } module.exports = Object.assign({ @@ -16,12 +16,7 @@ module.exports = Object.assign({ }, resolvers) function resolve (node, hash, pathRest, ipldResolver, parentNode) { - let type - try { - type = typeOf(node) - } catch (err) { - type = 'unknown' - } + const type = typeOf(node) const resolver = resolvers[type] if (!resolver) { return pull.error(new Error('Unkown node type ' + type)) @@ -30,6 +25,9 @@ function resolve (node, hash, pathRest, ipldResolver, parentNode) { } function typeOf (node) { - const data = UnixFS.unmarshal(node.data) - return data.type + if (Buffer.isBuffer(node.data)) { + return UnixFS.unmarshal(node.data).type + } else { + return 'object' + } } From 62cc87134dbcb8107cd187f1439046a5379d3596 Mon Sep 17 00:00:00 2001 From: Pedro Teixeira Date: Fri, 16 Jun 2017 15:59:14 +0100 Subject: [PATCH 12/14] fix: exporting: getting rid of /ipfs/ for good --- src/exporter/index.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/exporter/index.js b/src/exporter/index.js index a07aa2f8..08017c9f 100644 --- a/src/exporter/index.js +++ b/src/exporter/index.js @@ -17,7 +17,7 @@ function pathBaseAndRest (path) { if (typeof path === 'string') { if (path.indexOf('/ipfs/') === 0) { - path = path.substring(6) + path = pathBase = path.substring(6) } const subtreeStart = path.indexOf('/') if (subtreeStart > 0) { From 8dcdac6b4194110b29d619fa9164f279e05b24d1 Mon Sep 17 00:00:00 2001 From: Pedro Teixeira Date: Fri, 16 Jun 2017 16:55:55 +0100 Subject: [PATCH 13/14] fix: exporter: when subtree is specified, result paths are not complete --- src/exporter/dir-flat.js | 18 +++++++++++---- src/exporter/dir-hamt-sharded.js | 23 +++++++++++++++---- test/test-dirbuilder-sharding.js | 9 +++----- test/test-export-subtree.js | 39 +++++++++++++++++++------------- 4 files changed, 58 insertions(+), 31 deletions(-) diff --git a/src/exporter/dir-flat.js b/src/exporter/dir-flat.js index 1227f633..8d04c1d5 100644 --- a/src/exporter/dir-flat.js +++ b/src/exporter/dir-flat.js @@ -10,15 +10,14 @@ const cat = require('pull-cat') module.exports = dirExporter function dirExporter (node, name, pathRest, ipldResolver, resolve, parent) { - const accepts = pathRest.shift() + const accepts = pathRest[0] const dir = { path: name, hash: node.multihash } - return cat([ - pull.values([dir]), + const streams = [ pull( pull.values(node.links), pull.map((link) => ({ @@ -32,9 +31,18 @@ function dirExporter (node, name, pathRest, ipldResolver, resolve, parent) { return cb(err) } - cb(null, resolve(n.value, item.path, pathRest, ipldResolver, name, parent)) + cb(null, resolve(n.value, accepts || item.path, pathRest, ipldResolver, name, parent)) })), pull.flatten() ) - ]) + ] + + // place dir before if not specifying subtree + if (!pathRest.length) { + streams.unshift(pull.values([dir])) + } + + pathRest.shift() + + return cat(streams) } diff --git a/src/exporter/dir-hamt-sharded.js b/src/exporter/dir-hamt-sharded.js index aa01d769..c301169a 100644 --- a/src/exporter/dir-hamt-sharded.js +++ b/src/exporter/dir-hamt-sharded.js @@ -19,8 +19,7 @@ function shardedDirExporter (node, name, pathRest, ipldResolver, resolve, parent }] } - return cat([ - pull.values(dir), + const streams = [ pull( pull.values(node.links), pull.map((link) => { @@ -28,12 +27,15 @@ function shardedDirExporter (node, name, pathRest, ipldResolver, resolve, parent const p = link.name.substring(2) const pp = p ? path.join(name, p) : name let accept = true + let fromPathRest = false if (p && pathRest.length) { + fromPathRest = true accept = (p === pathRest[0]) } if (accept) { return { + fromPathRest: fromPathRest, name: p, path: pp, hash: link.multihash, @@ -49,9 +51,22 @@ function shardedDirExporter (node, name, pathRest, ipldResolver, resolve, parent return cb(err) } - cb(null, resolve(n.value, item.path, item.pathRest, ipldResolver, (dir && dir[0]) || parent)) + cb( + null, + resolve( + n.value, + item.fromPathRest ? item.name : item.path, + item.pathRest, + ipldResolver, + (dir && dir[0]) || parent)) })), pull.flatten() ) - ]) + ] + + if (!pathRest.length) { + streams.unshift(pull.values(dir)) + } + + return cat(streams) } diff --git a/test/test-dirbuilder-sharding.js b/test/test-dirbuilder-sharding.js index 4674eea7..d575b161 100644 --- a/test/test-dirbuilder-sharding.js +++ b/test/test-dirbuilder-sharding.js @@ -344,15 +344,12 @@ module.exports = (repo) => { function collected (err, nodes) { expect(err).to.not.exist() - expect(nodes.length).to.equal(4) + expect(nodes.length).to.equal(1) expect(nodes.map((node) => node.path)).to.deep.equal([ - 'QmTm3ZdKxyDLvcEePEvGfB2QReXsiAF7f39yjRcWwWrA6d', - 'QmTm3ZdKxyDLvcEePEvGfB2QReXsiAF7f39yjRcWwWrA6d/big', - 'QmTm3ZdKxyDLvcEePEvGfB2QReXsiAF7f39yjRcWwWrA6d/big/big', - 'QmTm3ZdKxyDLvcEePEvGfB2QReXsiAF7f39yjRcWwWrA6d/big/big/2000' + '2000' ]) pull( - nodes[3].content, + nodes[0].content, pull.collect((err, content) => { expect(err).to.not.exist() expect(content.toString()).to.equal('2000') diff --git a/test/test-export-subtree.js b/test/test-export-subtree.js index b1f890e4..93067650 100644 --- a/test/test-export-subtree.js +++ b/test/test-export-subtree.js @@ -32,14 +32,27 @@ module.exports = (repo) => { pull( exporter(hash, ipldResolver), pull.collect((err, files) => { + expect(err).to.not.exist() + expect(files.length).to.equal(1) + expect(files[0].path).to.equal('200Bytes.txt') + fileEql(files[0], smallFile, done) + }) + ) + }) + + it('export dir 1 level down', (done) => { + const hash = 'QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/level-1' + + pull( + exporter(hash, ipldResolver), + pull.collect((err, files) => { + console.log(files) expect(err).to.not.exist() expect(files.length).to.equal(3) - expect(files[0].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN') - expect(files[0].content).to.not.exist() - expect(files[1].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/level-1') - expect(files[1].content).to.not.exist() - expect(files[2].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/level-1/200Bytes.txt') - fileEql(files[2], smallFile, done) + expect(files[0].path).to.equal('level-1') + expect(files[1].path).to.equal('level-1/200Bytes.txt') + expect(files[2].path).to.equal('level-1/level-2') + fileEql(files[1], smallFile, done) }) ) }) @@ -51,9 +64,7 @@ module.exports = (repo) => { exporter(hash, ipldResolver), pull.collect((err, files) => { expect(err).to.not.exist() - expect(files.length).to.equal(1) - expect(files[0].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN') - expect(files[0].content).to.not.exist() + expect(files.length).to.equal(0) done() }) ) @@ -69,13 +80,9 @@ module.exports = (repo) => { exporter(nodeCID + '/a/file/level-1/200Bytes.txt', ipldResolver), pull.collect((err, files) => { expect(err).to.not.exist() - expect(files.length).to.equal(3) - expect(files[0].path).to.equal('zdpuAzp9okHgbLQdmusXn8cRjr9js6nAM4JrvKDeqp2XEkFzD/a/file') - expect(files[0].content).to.not.exist() - expect(files[1].path).to.equal('zdpuAzp9okHgbLQdmusXn8cRjr9js6nAM4JrvKDeqp2XEkFzD/a/file/level-1') - expect(files[1].content).to.not.exist() - expect(files[2].path).to.equal('zdpuAzp9okHgbLQdmusXn8cRjr9js6nAM4JrvKDeqp2XEkFzD/a/file/level-1/200Bytes.txt') - fileEql(files[2], smallFile, done) + expect(files.length).to.equal(1) + expect(files[0].path).to.equal('200Bytes.txt') + fileEql(files[0], smallFile, done) }) ) }) From 682b8c6f50dfd4340756bc565f9e05a5b35d9c66 Mon Sep 17 00:00:00 2001 From: David Dias Date: Fri, 16 Jun 2017 17:30:12 +0100 Subject: [PATCH 14/14] update circle config --- circle.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/circle.yml b/circle.yml index 434211a7..c04dc470 100644 --- a/circle.yml +++ b/circle.yml @@ -8,5 +8,6 @@ dependencies: - wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | sudo apt-key add - - sudo sh -c 'echo "deb [arch=amd64] http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google.list' - sudo apt-get update + - sudo apt-get install libpango-1.0-0=1.40.1-1ubuntu1 libpangocairo-1.0-0=1.40.1-1ubuntu1 libpangoft2-1.0-0=1.40.1-1ubuntu1 libpangoxft-1.0-0=1.40.1-1ubuntu1 - sudo apt-get --only-upgrade install google-chrome-stable - google-chrome --version