This repository was archived by the owner on Feb 12, 2024. It is now read-only.

feat: add wrapWithDirectory flag to files.add et al #1290

Closed
wants to merge 2 commits
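For context, a minimal sketch of how the new option is meant to be used against the callback-style js-ipfs API of this era, mirroring `ipfs add -w` on the CLI. The file name, contents, and node setup are illustrative assumptions, not taken from this diff:

```js
// Illustrative sketch only: wrap an added file in an enclosing directory.
const IPFS = require('ipfs')

const node = new IPFS()

node.on('ready', () => {
  const content = Buffer.from('hello world')

  node.files.add([{ path: 'hello.txt', content: content }], { wrapWithDirectory: true }, (err, res) => {
    if (err) throw err
    // res lists hello.txt plus one extra entry for the wrapping directory,
    // whose hash is the root to pin or share.
    res.forEach((file) => console.log(file.path, file.hash))
  })
})
```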
16 changes: 3 additions & 13 deletions src/cli/commands/files/add.js
@@ -15,8 +15,6 @@ const utils = require('../../utils')
 const print = require('../../utils').print
 const createProgressBar = require('../../utils').createProgressBar
 
-const WRAPPER = 'wrapper/'
-
 function checkPath (inPath, recursive) {
   // This function is to check for the following possible inputs
   // 1) "." add the cwd but throw error for no recursion flag
@@ -59,7 +57,6 @@ function getTotalBytes (path, recursive, cb) {
 
 function addPipeline (index, addStream, list, argv) {
   const {
-    wrapWithDirectory,
     quiet,
     quieter,
     silent
@@ -79,17 +76,9 @@ function addPipeline (index, addStream, list, argv) {
     pull.filter((file) => !file.isDirectory),
     pull.map((file) => ({
       path: file.path.substring(index, file.path.length),
-      originalPath: file.path
-    })),
-    pull.map((file) => ({
-      path: wrapWithDirectory ? WRAPPER + file.path : file.path,
-      content: fs.createReadStream(file.originalPath)
+      content: fs.createReadStream(file.path)
     })),
     addStream,
-    pull.map((file) => ({
-      hash: file.hash,
-      path: wrapWithDirectory ? file.path.substring(WRAPPER.length) : file.path
-    })),
     pull.collect((err, added) => {
       if (err) {
         throw err
@@ -198,7 +187,8 @@ module.exports = {
     cidVersion: argv.cidVersion,
     rawLeaves: argv.rawLeaves,
     onlyHash: argv.onlyHash,
-    hashAlg: argv.hash
+    hashAlg: argv.hash,
+    wrapWithDirectory: argv.wrapWithDirectory
   }
 
   // Temporary restriction on raw-leaves:
17 changes: 14 additions & 3 deletions src/core/components/files.js
@@ -17,6 +17,8 @@ const OtherBuffer = require('buffer').Buffer
 const CID = require('cids')
 const toB58String = require('multihashes').toB58String
 
+const WRAPPER = 'wrapper/'
+
 function noop () {}
 
 function prepareFile (self, opts, file, callback) {
@@ -33,16 +35,17 @@ function prepareFile (self, opts, file, callback) {
     (node, cb) => {
       const b58Hash = cid.toBaseEncodedString()
 
+
       cb(null, {
-        path: file.path || b58Hash,
+        path: opts.wrapWithDirectory ? file.path.substring(WRAPPER.length) : (file.path || b58Hash),
Contributor Author:

I admittedly don't know 100% of what's going on here or why the go implementation behaves this way, but having it return an empty path for the wrapper directory feels like an anti-pattern.

Contributor Author:

This was explained as "the wrapper is the root".
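To make the concern concrete, here is a hedged sketch of the result shape this line produces when wrapping is enabled; the file name, hashes, and sizes are made-up placeholders:

```js
// Hypothetical result of files.add([{ path: 'hello.txt', content }], { wrapWithDirectory: true }):
// the real file keeps its path (with the 'wrapper/' prefix stripped again here),
// while the wrapping directory itself comes back with an empty path, i.e. "the wrapper is the root".
const expected = [
  { path: 'hello.txt', hash: 'QmPlaceholderFileHash', size: 19 },
  { path: '', hash: 'QmPlaceholderWrapperHash', size: 77 }
]
```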

         hash: b58Hash,
         size: node.size
       })
     }
   ], callback)
 }
 
-function normalizeContent (content) {
+function normalizeContent (opts, content) {
   if (!Array.isArray(content)) {
     content = [content]
   }
@@ -68,6 +71,14 @@
       }
     }
 
+    if (opts.wrapWithDirectory && !data.path) {
+      throw new Error('Must provide a path when wrapping with a directory')
+    }
+
+    if (opts.wrapWithDirectory) {
+      data.path = WRAPPER + data.path
+    }
+
     return data
   })
 }
@@ -123,7 +134,7 @@ module.exports = function files (self) {
 
     opts.progress = progress
     return pull(
-      pull.map(normalizeContent),
+      pull.map(normalizeContent.bind(null, opts)),
       pull.flatten(),
       importer(self._ipld, opts),
       pull.asyncMap(prepareFile.bind(null, self, opts))
8 changes: 5 additions & 3 deletions src/http/api/resources/files.js
@@ -148,7 +148,8 @@ exports.add = {
           then: Joi.boolean().valid(false).required(),
           otherwise: Joi.boolean().valid(false)
         }),
-        'only-hash': Joi.boolean()
+        'only-hash': Joi.boolean(),
+        'wrap-with-directory': Joi.boolean()
       })
       // TODO: Necessary until validate "recursive", "stream-channels" etc.
       .options({ allowUnknown: true })
@@ -208,7 +209,8 @@ exports.add = {
       rawLeaves: request.query['raw-leaves'],
       progress: request.query.progress ? progressHandler : null,
       onlyHash: request.query['only-hash'],
-      hashAlg: request.query['hash']
+      hashAlg: request.query['hash'],
+      wrapWithDirectory: request.query['wrap-with-directory']
     }
 
     const aborter = abortable()
@@ -246,7 +248,7 @@ exports.add = {
       ipfs.files.addPullStream(options),
       pull.map((file) => {
         return {
-          Name: file.path ? file.path : file.hash,
+          Name: file.path, // addPullStream already turned this into a hash if it wanted to
Contributor Author:

This was already being done in addPullStream, so I removed it from here, because addPullStream has a new case where it is leaving path as an empty string and wants it that way.
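For illustration, the kind of objects the add endpoint would then stream back as ndjson when wrap-with-directory is set; hashes and sizes are placeholders. The wrapping directory arrives from addPullStream with an empty path, so its Name is intentionally left empty rather than being swapped for its hash here:

```js
// Hypothetical entries streamed for POST /api/v0/add?wrap-with-directory=true
const streamed = [
  { Name: 'hello.txt', Hash: 'QmPlaceholderFileHash', Size: 19 },
  { Name: '', Hash: 'QmPlaceholderWrapperHash', Size: 77 } // the wrapping directory
]
```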

           Hash: file.hash,
           Size: file.size
         }