 /* eslint-env mocha */
 'use strict'

-const expect = require('chai').expect
-const bs58 = require('bs58')
-const Readable = require('readable-stream')
-const path = require('path')
-const fs = require('fs')
-const isNode = require('detect-node')
-
-module.exports = (common) => {
-  describe('.files/add', () => {
-    let smallFile
-    let bigFile
-    let ipfs
-
-    before((done) => {
-      smallFile = fs.readFileSync(path.join(__dirname, './data/testfile.txt')
-)
-      bigFile = fs.readFileSync(path.join(__dirname, './data/15mb.random')
-)
-
-      common.setup((err, _ipfs) => {
+var expect = require('chai').expect
+var bs58 = require('bs58')
+var Readable = require('readable-stream')
+var path = require('path')
+var isNode = require('detect-node')
+var fs = require('fs')
+var bl = require('bl')
+
+module.exports = function (common) {
+  describe('.files', function () {
+    var smallFile = void 0
+    var bigFile = void 0
+    var ipfs = void 0
+
+    before(function (done) {
+      smallFile = fs.readFileSync(path.join(__dirname, './data/testfile.txt'))
+      bigFile = fs.readFileSync(path.join(__dirname, './data/15mb.random'))
+
+      common.setup(function (err, _ipfs) {
         expect(err).to.not.exist
         ipfs = _ipfs
         done()
       })
     })

-    after((done) => {
+    after(function (done) {
       common.teardown(done)
     })

-    it('stream', (done) => {
-      const buffered = new Buffer('some data')
-      const rs = new Readable()
-      rs.push(buffered)
-      rs.push(null)
-
-      const arr = []
-      const filePair = {path: 'data.txt', content: rs}
-      arr.push(filePair)
-
-      ipfs.files.add(arr, (err, res) => {
-        expect(err).to.not.exist
-        expect(res).to.be.length(1)
-        expect(res[0].path).to.equal('data.txt')
-        expect(res[0].node.size()).to.equal(17)
-        const mh = 'QmVv4Wz46JaZJeH5PMV4LGbRiiMKEmszPYY3g6fjGnVXBS'
-        expect(bs58.encode(res[0].node.multihash()).toString()).to.equal(mh)
-        done()
+    describe('.add', function () {
+      it('stream', function (done) {
+        var buffered = new Buffer('some data')
+        var rs = new Readable()
+        rs.push(buffered)
+        rs.push(null)
+
+        var arr = []
+        var filePair = { path: 'data.txt', content: rs }
+        arr.push(filePair)
+
+        ipfs.files.add(arr, function (err, res) {
+          expect(err).to.not.exist
+          expect(res).to.be.length(1)
+          expect(res[0].path).to.equal('data.txt')
+          expect(res[0].node.size()).to.equal(17)
+          var mh = 'QmVv4Wz46JaZJeH5PMV4LGbRiiMKEmszPYY3g6fjGnVXBS'
+          expect(bs58.encode(res[0].node.multihash()).toString()).to.equal(mh)
+          done()
+        })
       })
-    })

-    it('buffer as tuple', (done) => {
-      const file = {
-        path: 'testfile.txt',
-        content: smallFile
-      }
+      it('buffer as tuple', function (done) {
+        if (!isNode) return done()

-      ipfs.files.add([file], (err, res) => {
-        expect(err).to.not.exist
+        var file = {
+          path: 'testfile.txt',
+          content: smallFile
+        }

-        const added = res[0] != null ? res[0] : res
-        const mh = bs58.encode(added.node.multihash()).toString()
-        expect(mh).to.equal('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP')
-        expect(added.path).to.equal('testfile.txt')
-        expect(added.node.links).to.have.length(0)
-        done()
+        ipfs.files.add([file], function (err, res) {
+          expect(err).to.not.exist
+
+          var added = res[0] != null ? res[0] : res
+          var mh = bs58.encode(added.node.multihash()).toString()
+          expect(mh).to.equal('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP')
+          expect(added.path).to.equal('testfile.txt')
+          expect(added.node.links).to.have.length(0)
+          done()
+        })
       })
-    })

-    it('buffer', (done) => {
-      ipfs.files.add(smallFile, (err, res) => {
-        expect(err).to.not.exist
+      it('buffer', function (done) {
+        ipfs.files.add(smallFile, function (err, res) {
+          expect(err).to.not.exist
+
+          expect(res).to.have.length(1)
+          var mh = bs58.encode(res[0].node.multihash()).toString()
+          expect(mh).to.equal('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP')
+          expect(res[0].path).to.equal(mh)
+          expect(res[0].node.links).to.have.length(0)
+          done()
+        })
+      })

-        expect(res).to.have.length(1)
-        const mh = bs58.encode(res[0].node.multihash()).toString()
-        expect(mh).to.equal('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP')
-        expect(res[0].path).to.equal(mh)
-        expect(res[0].node.links).to.have.length(0)
-        done()
+      it('BIG buffer', function (done) {
+        ipfs.files.add(bigFile, function (err, res) {
+          expect(err).to.not.exist
+
+          expect(res).to.have.length(1)
+          expect(res[0].node.links).to.have.length(58)
+          var mh = bs58.encode(res[0].node.multihash()).toString()
+          expect(res[0].path).to.equal(mh)
+          expect(mh).to.equal('Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq')
+          done()
+        })
       })
-    })

-    it('BIG buffer', (done) => {
-      ipfs.files.add(bigFile, (err, res) => {
-        expect(err).to.not.exist
+      it('add a nested dir as array', function (done) {
+        if (!isNode) {
+          return done()
+        }
+        var base = path.join(__dirname, 'data/test-folder')
+        var content = function content (name) {
+          return {
+            path: 'test-folder/' + name,
+            content: fs.readFileSync(path.join(base, name))
+          }
+        }
+        var emptyDir = function emptyDir (name) {
+          return {
+            path: 'test-folder/' + name,
+            dir: true
+          }
+        }
+        var dirs = [content('pp.txt'), content('holmes.txt'), content('jungle.txt'), content('alice.txt'), emptyDir('empty-folder'), content('files/hello.txt'), content('files/ipfs.txt'), emptyDir('files/empty')]
+
+        ipfs.files.add(dirs, function (err, res) {
+          expect(err).to.not.exist
+
+          var added = res[res.length - 1]
+          var mh = bs58.encode(added.node.multihash()).toString()
+          expect(mh).to.equal('QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP')
+          expect(added.path).to.equal('test-folder')
+          expect(added.node.links).to.have.length(6)
+          done()
+        })
+      })

-        expect(res).to.have.length(1)
-        expect(res[0].node.links).to.have.length(58)
-        const mh = bs58.encode(res[0].node.multihash()).toString()
-        expect(res[0].path).to.equal(mh)
-        expect(mh).to.equal('Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq')
-        done()
+      describe('promise', function () {
+        it('buffer', function () {
+          return ipfs.files.add(smallFile).then(function (res) {
+            var added = res[0] != null ? res[0] : res
+            var mh = bs58.encode(added.node.multihash()).toString()
+            expect(mh).to.equal('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP')
+            expect(added.path).to.equal(mh)
+            expect(added.node.links).to.have.length(0)
+          }).catch(function (err) {
+            expect(err).to.not.exist
+          })
+        })
       })
     })

-    it('add a nested dir as array', (done) => {
-      if (!isNode) {
-        return done()
-        // can't run this test cause browserify
-        // can't shim readFileSync in runtime
-      }
-      const base = path.join(__dirname, 'data/test-folder')
-      const content = (name) => ({
-        path: `test-folder/${name}`,
-        content: fs.readFileSync(path.join(base, name))
+    describe('.cat', function () {
+      it('returns file stream', function (done) {
+        var hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'
+        ipfs.cat(hash, function (err, file) {
+          expect(err).to.not.exist
+          file.pipe(bl(function (err, bldata) {
+            expect(err).to.not.exist
+            expect(bldata.toString()).to.contain('Check out some of the other files in this directory:')
+            done()
+          }))
+        })
       })
-      const emptyDir = (name) => ({
-        path: `test-folder/${name}`
+
+      // This fails on js-ipfs-api
+      it('takes a buffer input', function (done) {
+        var mhBuf = new Buffer(bs58.decode('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'))
+        ipfs.cat(mhBuf, function (err, file) {
+          expect(err).to.not.exist
+          file.pipe(bl(function (err, bldata) {
+            expect(err).to.not.exist
+            expect(bldata.toString()).to.contain('Check out some of the other files in this directory:')
+            done()
+          }))
+        })
       })
-      const dirs = [
-        content('pp.txt'),
-        content('holmes.txt'),
-        content('jungle.txt'),
-        content('alice.txt'),
-        emptyDir('empty-folder'),
-        content('files/hello.txt'),
-        content('files/ipfs.txt'),
-        emptyDir('files/empty')
-      ]
-
-      ipfs.files.add(dirs, (err, res) => {
-        expect(err).to.not.exist

-        const added = res[res.length - 1]
-        const mh = bs58.encode(added.node.multihash()).toString()
-        expect(mh).to.equal('QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP')
-        expect(added.path).to.equal('test-folder')
-        expect(added.node.links).to.have.length(6)
-        done()
+      // You can add a large file to your ipfs repo and change the hash to the file after installing js-ipfs
+      it('returns a large file', function (done) {
+        var hash = 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq'
+        ipfs.cat(hash, function (err, file) {
+          expect(err).to.not.exist
+          file.pipe(bl(function (err, bldata) {
+            expect(err).to.not.exist
+            expect(bldata).to.deep.equal(bigFile)
+            done()
+          }))
+        })
       })
-    })

-    describe('promise', () => {
-      it('buffer', () => {
-        return ipfs.files.add(smallFile)
-          .then((res) => {
-            const added = res[0] != null ? res[0] : res
-            const mh = bs58.encode(added.node.multihash()).toString()
-            expect(mh).to.equal('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP')
-            expect(added.path).to.equal(mh)
-            expect(added.node.links).to.have.length(0)
+      it('returns error on invalid key', function (done) {
+        var hash = 'somethingNotMultihash'
+        ipfs.cat(hash, function (err, file) {
+          expect(err).to.exist
+          var errString = err.toString()
+          if (errString === 'Error: invalid ipfs ref path') {
+            expect(err.toString()).to.contain('Error: invalid ipfs ref path')
+          }
+          if (errString === 'Error: Invalid Key') {
+            expect(err.toString()).to.contain('Error: Invalid Key')
+          }
+          done()
+        })
+      })
+
+      describe('promise', function () {
+        it('files.cat', function (done) {
+          var hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'
+          ipfs.cat(hash).then(function (stream) {
+            stream.pipe(bl(function (err, bldata) {
+              expect(err).to.not.exist
+              expect(bldata.toString()).to.contain('Check out some of the other files in this directory:')
+              done()
+            }))
+          }).catch(function (err) {
+            expect(err).to.not.exist
+          })
+        })
+
+        it('returns error on invalid key', function (done) {
+          var hash = 'somethingNotMultihash'
+          ipfs.cat(hash).then(function (stream) {}).catch(function (err) {
+            expect(err).to.exist
+            var errString = err.toString()
+            if (errString === 'Error: invalid ipfs ref path') {
+              expect(err.toString()).to.contain('Error: invalid ipfs ref path')
+            }
+            if (errString === 'Error: Invalid Key') {
+              expect(err.toString()).to.contain('Error: Invalid Key')
+            }
+            done()
           })
-          .catch((err) => {
+        })
+
+        it('takes a buffer input', function (done) {
+          var hash = new Buffer(bs58.decode('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'))
+          ipfs.cat(hash).then(function (stream) {
+            stream.pipe(bl(function (err, bldata) {
+              expect(err).to.not.exist
+              expect(bldata.toString()).to.contain('Check out some of the other files in this directory:')
+              done()
+            }))
+          }).catch(function (err) {
             expect(err).to.not.exist
           })
+        })
       })
     })
   })
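
Note on how this suite is consumed: the module exports a single function that receives a `common` object exposing `setup` and `teardown`, so the same tests can be run against different IPFS implementations. Below is a minimal sketch of a consumer, assuming this file lives at `./files`; `createNode` and its require path are hypothetical placeholders, not part of the code above.

'use strict'

// Hypothetical wiring for the shared suite shown in this diff.
var test = require('./files')                    // the module from this diff
var createNode = require('./utils/create-node')  // placeholder factory that yields an IPFS instance

var common = {
  setup: function (cb) {
    // Hand the suite a ready-to-use node; it becomes `ipfs` inside the tests.
    createNode(function (err, node) {
      cb(err, node)
    })
  },
  teardown: function (done) {
    // Stop or clean up whatever setup allocated, then signal completion.
    done()
  }
}

test(common)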