Skip to content
This repository was archived by the owner on Mar 10, 2020. It is now read-only.

Commit 600e6c4

Browse files
authored
Merge pull request #54 from ipfs/feat/ipfs.files.get
feat/ipfs.files.get
2 parents 7cc174e + aea7585 commit 600e6c4

File tree

4 files changed

+223
-29
lines changed

4 files changed

+223
-29
lines changed

API/files/README.md

Lines changed: 42 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -93,9 +93,9 @@ ipfs.files.createAddStream(function (err, stream) {
9393

9494

9595

96-
#### `cat`
96+
#### `cat`
9797

98-
> Streams the file at the given IPFS multihash..
98+
> Streams the file at the given IPFS multihash.
9999
100100
##### `Go` **WIP**
101101

@@ -116,3 +116,43 @@ ipfs.files.cat(multihash, function (err, file) {
116116
})
117117
```
118118

119+
120+
#### `get`
121+
> Get [UnixFS][] files from IPFS.
122+
123+
##### `Go` **WIP**
124+
125+
##### `JavaScript` - ipfs.files.get(hash, [callback])
126+
127+
Where `hash` is an IPFS multiaddress or multihash.
128+
129+
`callback` must follow `function (err, stream) {}` signature, where `err` is an
130+
error if the operation was not successful. `stream` will be a Readable stream in
131+
[*object mode*](https://nodejs.org/api/stream.html#stream_object_mode),
132+
outputting objects of the form
133+
134+
```js
135+
{
136+
path: '/tmp/myfile.txt',
137+
content: <Readable stream>
138+
}
139+
```
140+
141+
Here, each `path` corresponds to the name of a file, and `content` is a regular
142+
Readable stream with the raw contents of that file.
143+
144+
If no `callback` is passed, a promise is returned with the Readable stream.
145+
146+
Example:
147+
148+
```js
149+
var multiaddr = '/ipfs/QmQ2r6iMNpky5f1m4cnm3Yqw8VSvjuKpTcK1X7dBR1LkJF'
150+
ipfs.files.get(multiaddr, function (err, stream) {
151+
stream.on('data', (file) => {
152+
// write the file's path and contents to standard out
153+
console.log(file.path)
154+
file.content.pipe(process.stdout)
155+
})
156+
})
157+
```
158+

README.md

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,7 @@
2020
- [Files](/API/files)
2121
- [`add`](/API/files#add)
2222
- [`createAddStream`](/files#createaddstream)
23+
- [`get`](/API/files#get)
2324
- [`cat`](/API/files#cat)
2425
- [Object](/API/object)
2526
- [`object.new`](/API/object#objectnew)
@@ -99,7 +100,7 @@ test.all(common)
99100
100101
## API
101102

102-
A valid (read: that follows this interface) IPFS core implementation, must expose the API described in [/API](/API)
103+
A valid (read: that follows this interface) IPFS core implementation must expose the API described in [/API](/API).
103104

104105
## Contribute
105106

@@ -114,3 +115,5 @@ This repository falls under the IPFS [Code of Conduct](https://github.com/ipfs/c
114115
## License
115116

116117
MIT
118+
119+
[UnixFS]: https://github.com/ipfs/specs/tree/master/unixfs

package.json

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -31,6 +31,7 @@
3131
"bl": "^1.1.2",
3232
"bs58": "^3.0.0",
3333
"chai": "^3.5.0",
34+
"concat-stream": "^1.5.1",
3435
"detect-node": "^2.0.3",
3536
"ipfs-merkle-dag": "^0.6.2",
3637
"readable-stream": "1.1.13"

src/files.js

Lines changed: 176 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -8,20 +8,29 @@ const bs58 = require('bs58')
88
const Readable = require('readable-stream')
99
const path = require('path')
1010
const fs = require('fs')
11-
const isNode = require('detect-node')
1211
const bl = require('bl')
12+
const concat = require('concat-stream')
13+
const through = require('through2')
1314

1415
module.exports = (common) => {
1516
describe('.files', () => {
1617
let smallFile
1718
let bigFile
19+
let directoryContent
1820
let ipfs
1921

2022
before((done) => {
21-
smallFile = fs.readFileSync(path.join(__dirname, './data/testfile.txt')
22-
)
23-
bigFile = fs.readFileSync(path.join(__dirname, './data/15mb.random')
24-
)
23+
smallFile = fs.readFileSync(path.join(__dirname, './data/testfile.txt'))
24+
bigFile = fs.readFileSync(path.join(__dirname, './data/15mb.random'))
25+
26+
directoryContent = {
27+
'pp.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/pp.txt')),
28+
'holmes.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/holmes.txt')),
29+
'jungle.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/jungle.txt')),
30+
'alice.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/alice.txt')),
31+
'files/hello.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/files/hello.txt')),
32+
'files/ipfs.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/files/ipfs.txt'))
33+
}
2534

2635
common.setup((err, _ipfs) => {
2736
expect(err).to.not.exist
@@ -102,15 +111,9 @@ module.exports = (common) => {
102111
})
103112

104113
it('add a nested dir as array', (done) => {
105-
if (!isNode) {
106-
return done()
107-
// can't run this test cause browserify
108-
// can't shim readFileSync in runtime
109-
}
110-
const base = path.join(__dirname, 'data/test-folder')
111114
const content = (name) => ({
112115
path: `test-folder/${name}`,
113-
content: fs.readFileSync(path.join(base, name))
116+
content: directoryContent[name]
114117
})
115118
const emptyDir = (name) => ({
116119
path: `test-folder/${name}`
@@ -131,30 +134,23 @@ module.exports = (common) => {
131134

132135
const added = res[res.length - 1]
133136
const mh = bs58.encode(added.node.multihash()).toString()
134-
expect(mh).to.equal('QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP')
135-
expect(added.path).to.equal('test-folder')
136137
expect(added.node.links).to.have.length(6)
138+
expect(added.path).to.equal('test-folder')
139+
expect(mh).to.equal('QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP')
140+
137141
done()
138142
})
139143
})
140144

141145
describe('.createAddStream', () => {
142146
it('stream of valid files and dirs', (done) => {
143-
if (!isNode) {
144-
return done()
145-
// can't run this test cause browserify
146-
// can't shim readFileSync in runtime
147-
}
148-
149-
const base = path.join(__dirname, 'data/test-folder')
150147
const content = (name) => ({
151148
path: `test-folder/${name}`,
152-
content: fs.readFileSync(path.join(base, name))
149+
content: directoryContent[name]
153150
})
154151
const emptyDir = (name) => ({
155152
path: `test-folder/${name}`
156153
})
157-
158154
const files = [
159155
content('pp.txt'),
160156
content('holmes.txt'),
@@ -243,7 +239,7 @@ module.exports = (common) => {
243239
})
244240

245241
describe('.cat', () => {
246-
it('with a bas58 multihash encoded string', () => {
242+
it('with a base58 multihash encoded string', () => {
247243
const hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'
248244

249245
return ipfs.cat(hash)
@@ -275,13 +271,167 @@ module.exports = (common) => {
275271
const hash = new Buffer(bs58.decode('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'))
276272
return ipfs.cat(hash)
277273
.then((stream) => {
278-
stream.pipe(bl((err, bldata) => {
274+
stream.pipe(bl((err, data) => {
279275
expect(err).to.not.exist
280-
expect(bldata.toString()).to.contain('Check out some of the other files in this directory:')
276+
expect(data.toString()).to.contain('Check out some of the other files in this directory:')
281277
}))
282278
})
283279
})
284280
})
285281
})
282+
283+
describe('.get', () => {
284+
it('with a base58 encoded multihash', (done) => {
285+
const hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'
286+
ipfs.files.get(hash, (err, stream) => {
287+
expect(err).to.not.exist
288+
stream.pipe(concat((files) => {
289+
expect(err).to.not.exist
290+
expect(files).to.be.length(1)
291+
expect(files[0].path).to.equal(hash)
292+
files[0].content.pipe(concat((content) => {
293+
expect(content.toString()).to.contain('Check out some of the other files in this directory:')
294+
done()
295+
}))
296+
}))
297+
})
298+
})
299+
300+
it('with a multihash', (done) => {
301+
const hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'
302+
const mhBuf = new Buffer(bs58.decode(hash))
303+
ipfs.files.get(mhBuf, (err, stream) => {
304+
expect(err).to.not.exist
305+
stream.pipe(concat((files) => {
306+
expect(files).to.be.length(1)
307+
expect(files[0].path).to.deep.equal(hash)
308+
files[0].content.pipe(concat((content) => {
309+
expect(content.toString()).to.contain('Check out some of the other files in this directory:')
310+
done()
311+
}))
312+
}))
313+
})
314+
})
315+
316+
it('large file', (done) => {
317+
const hash = 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq'
318+
ipfs.files.get(hash, (err, stream) => {
319+
expect(err).to.not.exist
320+
321+
// accumulate the files and their content
322+
var files = []
323+
stream.pipe(through.obj((file, enc, next) => {
324+
file.content.pipe(concat((content) => {
325+
files.push({
326+
path: file.path,
327+
content: content
328+
})
329+
next()
330+
}))
331+
}, () => {
332+
expect(files.length).to.equal(1)
333+
expect(files[0].path).to.equal(hash)
334+
expect(files[0].content).to.deep.equal(bigFile)
335+
done()
336+
}))
337+
})
338+
})
339+
340+
it('directory', (done) => {
341+
const hash = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP'
342+
ipfs.files.get(hash, (err, stream) => {
343+
expect(err).to.not.exist
344+
345+
// accumulate the files and their content
346+
var files = []
347+
stream.pipe(through.obj((file, enc, next) => {
348+
if (file.content) {
349+
file.content.pipe(concat((content) => {
350+
files.push({
351+
path: file.path,
352+
content: content
353+
})
354+
next()
355+
}))
356+
} else {
357+
files.push(file)
358+
next()
359+
}
360+
}, () => {
361+
// Check paths
362+
var paths = files.map((file) => {
363+
return file.path
364+
})
365+
expect(paths).to.deep.equal([
366+
'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP',
367+
'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt',
368+
'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder',
369+
'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files',
370+
'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/empty',
371+
'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/hello.txt',
372+
'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/ipfs.txt',
373+
'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt',
374+
'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt',
375+
'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt'
376+
])
377+
378+
// Check contents
379+
var contents = files.map((file) => {
380+
return file.content ? file.content : null
381+
})
382+
expect(contents).to.deep.equal([
383+
null,
384+
directoryContent['alice.txt'],
385+
null,
386+
null,
387+
null,
388+
directoryContent['files/hello.txt'],
389+
directoryContent['files/ipfs.txt'],
390+
directoryContent['holmes.txt'],
391+
directoryContent['jungle.txt'],
392+
directoryContent['pp.txt']
393+
])
394+
done()
395+
}))
396+
})
397+
})
398+
399+
describe('promise', () => {
400+
it('with a base58 encoded string', (done) => {
401+
const hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'
402+
ipfs.files.get(hash)
403+
.then((stream) => {
404+
stream.pipe(concat((files) => {
405+
expect(files).to.be.length(1)
406+
expect(files[0].path).to.equal(hash)
407+
files[0].content.pipe(concat((content) => {
408+
expect(content.toString()).to.contain('Check out some of the other files in this directory:')
409+
done()
410+
}))
411+
}))
412+
})
413+
.catch((err) => {
414+
expect(err).to.not.exist
415+
})
416+
})
417+
418+
it('errors on invalid key', (done) => {
419+
const hash = 'somethingNotMultihash'
420+
ipfs.files.get(hash)
421+
.then((stream) => {})
422+
.catch((err) => {
423+
expect(err).to.exist
424+
const errString = err.toString()
425+
if (errString === 'Error: invalid ipfs ref path') {
426+
expect(err.toString()).to.contain('Error: invalid ipfs ref path')
427+
}
428+
if (errString === 'Error: Invalid Key') {
429+
expect(err.toString()).to.contain('Error: Invalid Key')
430+
}
431+
done()
432+
})
433+
})
434+
})
435+
})
286436
})
287437
}

0 commit comments

Comments (0)