Simplify options handling #238
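The change replaces the old pattern, where each function merged a defaults object into a reassigned opts parameter, with destructured parameters whose defaults live directly in the function signature. A minimal sketch of the two styles, with illustrative names only rather than the module code:

// Before: defaults merged into a reassigned `opts` object
const greetBefore = (name, opts) => {
  opts = { punctuation: '!', statusCb: () => {}, ...opts }
  opts.statusCb({ type: 'greet', msg: name, phase: 'start' })
  return `Hello ${name}${opts.punctuation}`
}

// After: defaults declared in the destructured signature, no reassignment
const greetAfter = (name, { punctuation = '!', statusCb = () => {} } = {}) => {
  statusCb({ type: 'greet', msg: name, phase: 'start' })
  return `Hello ${name}${punctuation}`
}

console.log(greetBefore('Netlify')) // Hello Netlify!
console.log(greetAfter('Netlify', { punctuation: '?' })) // Hello Netlify?

The destructured form documents the accepted options in the signature itself and lets each callee state which options it actually uses.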
File: hash_fns.js

@@ -5,28 +5,19 @@ const zipIt = require('@netlify/zip-it-and-ship-it')
 const fromArray = require('from2-array')
 const pump = promisify(require('pump'))

 const { DEFAULT_CONCURRENT_HASH } = require('./constants')
 const { hasherCtor, manifestCollectorCtor } = require('./hasher_segments')

-const hashFns = async (dir, opts) => {
-  opts = {
-    concurrentHash: DEFAULT_CONCURRENT_HASH,
-    assetType: 'function',
-    hashAlgorithm: 'sha256',
-    // tmpDir,
-    statusCb: () => {},
-    ...opts,
-  }
+const hashFns = async (dir, { tmpDir, concurrentHash, hashAlgorithm = 'sha256', assetType = 'function', statusCb }) => {
   // early out if the functions dir is omitted
   if (!dir) return { functions: {}, shaMap: {} }
-  if (!opts.tmpDir) throw new Error('Missing tmpDir directory for zipping files')
+  if (!tmpDir) throw new Error('Missing tmpDir directory for zipping files')

-  const functionZips = await zipIt.zipFunctions(dir, opts.tmpDir)
+  const functionZips = await zipIt.zipFunctions(dir, tmpDir)

   const fileObjs = functionZips.map(({ path: functionPath, runtime }) => ({
     filepath: functionPath,
-    root: opts.tmpDir,
-    relname: path.relative(opts.tmpDir, functionPath),
+    root: tmpDir,
+    relname: path.relative(tmpDir, functionPath),
     basename: path.basename(functionPath),
     extname: path.extname(functionPath),
     type: 'file',

@@ -37,14 +28,14 @@ const hashFns = async (dir, opts) => {

   const functionStream = fromArray.obj(fileObjs)

-  const hasher = hasherCtor(opts)
+  const hasher = hasherCtor({ concurrentHash, hashAlgorithm })

   // Written to by manifestCollector
   // normalizedPath: hash (wanted by deploy API)
   const functions = {}
   // hash: [fileObj, fileObj, fileObj]
   const fnShaMap = {}
-  const manifestCollector = manifestCollectorCtor(functions, fnShaMap, opts)
+  const manifestCollector = manifestCollectorCtor(functions, fnShaMap, { statusCb, assetType })

   await pump(functionStream, hasher, manifestCollector)

File: hasher_segments.js

@@ -7,7 +7,7 @@ const map = require('through2-map').obj
 const { normalizePath } = require('./util')

 // a parallel transform stream segment ctor that hashes fileObj's created by folder-walker
-const hasherCtor = ({ concurrentHash, hashAlgorithm = 'sha1' }) => {
+const hasherCtor = ({ concurrentHash, hashAlgorithm }) => {
   const hashaOpts = { algorithm: hashAlgorithm }
   if (!concurrentHash) throw new Error('Missing required opts')
   return transform(concurrentHash, { objectMode: true }, async (fileObj, cb) => {

@@ -22,13 +22,14 @@ const hasherCtor = ({ concurrentHash, hashAlgorithm = 'sha1' }) => {
 }

 // Inject normalized file names into normalizedPath and assetType
-const fileNormalizerCtor = ({ assetType = 'file' }) =>
+const fileNormalizerCtor = ({ assetType }) =>
   map((fileObj) => ({ ...fileObj, assetType, normalizedPath: normalizePath(fileObj.relname) }))

 // A writable stream segment ctor that normalizes file paths, and writes shaMap's
 const manifestCollectorCtor = (filesObj, shaMap, { statusCb, assetType }) => {
   if (!statusCb || !assetType) throw new Error('Missing required options')
   return flushWriteStream.obj((fileObj, _, cb) => {
+    // eslint-disable-next-line no-param-reassign
     filesObj[fileObj.normalizedPath] = fileObj.hash
Review comment: Ideally, we might want to return a copy, but in order to prevent any breaking change, this keeps the fact that this function mutates the arguments passed by its caller. (A sketch of the copying alternative follows this file's diff.)

     // We map a hash to multiple fileObj's because the same file

@@ -38,6 +39,7 @@ const manifestCollectorCtor = (filesObj, shaMap, { statusCb, assetType }) => {
       // eslint-disable-next-line fp/no-mutating-methods
       shaMap[fileObj.hash].push(fileObj)
     } else {
+      // eslint-disable-next-line no-param-reassign
       shaMap[fileObj.hash] = [fileObj]
     }
     statusCb({

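The review comment above weighs mutating the caller's manifest objects against returning a copy. A rough sketch of that trade-off, with hypothetical helper names rather than the stream code above:

// Mutating collector (the behavior the diff keeps): updates the caller's object in place
const collectInPlace = (manifest, fileObj) => {
  manifest[fileObj.normalizedPath] = fileObj.hash
}

// Copying alternative alluded to in the comment: builds a new object and leaves the input untouched,
// but callers would then have to use the return value, which is the breaking change being avoided
const collectCopy = (manifest, fileObj) => ({ ...manifest, [fileObj.normalizedPath]: fileObj.hash })

const manifest = {}
collectInPlace(manifest, { normalizedPath: 'index.html', hash: 'abc123' })
console.log(manifest) // { 'index.html': 'abc123' }
console.log(collectCopy(manifest, { normalizedPath: 'about.html', hash: 'def456' })) // both entries; manifest itself unchanged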
File: the module that defines deploySite

@@ -16,21 +16,24 @@ const hashFns = require('./hash_fns')
 const uploadFiles = require('./upload_files')
 const { waitForDiff, waitForDeploy, getUploadList, defaultFilter } = require('./util')

-const deploySite = async (api, siteId, dir, opts) => {
-  opts = {
-    fnDir: null,
-    configPath: null,
-    draft: false,
+const deploySite = async (
+  api,
+  siteId,
+  dir,
+  {
+    fnDir = null,
+    configPath = null,
+    draft = false,
     // API calls this the 'title'
-    message: undefined,
-    tmpDir: tempy.directory(),
-    deployTimeout: DEFAULT_DEPLOY_TIMEOUT,
-    concurrentHash: DEFAULT_CONCURRENT_HASH,
-    concurrentUpload: DEFAULT_CONCURRENT_UPLOAD,
-    filter: defaultFilter,
-    syncFileLimit: DEFAULT_SYNC_LIMIT,
-    maxRetry: DEFAULT_MAX_RETRY,
-    statusCb: () => {
+    message: title,
+    tmpDir = tempy.directory(),
+    deployTimeout = DEFAULT_DEPLOY_TIMEOUT,
+    concurrentHash = DEFAULT_CONCURRENT_HASH,
+    concurrentUpload = DEFAULT_CONCURRENT_UPLOAD,
+    filter = defaultFilter,
+    syncFileLimit = DEFAULT_SYNC_LIMIT,
+    maxRetry = DEFAULT_MAX_RETRY,
+    statusCb = () => {
       /* default to noop */
       // statusObj: {
       //   type: name-of-step

@@ -39,22 +42,21 @@ const deploySite = async (api, siteId, dir, opts) => {
       //   spinner: a spinner from cli-spinners package
       // }
     },
     // allows updating an existing deploy
-    deployId: null,
-    ...opts,
-  }
-
-  const { fnDir, configPath, statusCb, message: title } = opts
-
+    deployId: deployIdOpt = null,
Review comment: Distinguish between the deployId option and the deployId of the created deploy.
+    hashAlgorithm,
+    assetType,
+    branch,
+  } = {},
+) => {
   statusCb({
     type: 'hashing',
     msg: `Hashing files...`,
     phase: 'start',
   })

   const [{ files, filesShaMap }, { functions, fnShaMap }] = await Promise.all([
-    hashFiles(dir, configPath, opts),
-    hashFns(fnDir, opts),
+    hashFiles(dir, configPath, { concurrentHash, hashAlgorithm, assetType, statusCb, filter }),
+    hashFns(fnDir, { tmpDir, concurrentHash, hashAlgorithm, statusCb, assetType }),
   ])

   const filesCount = Object.keys(files).length

@@ -82,22 +84,22 @@ const deploySite = async (api, siteId, dir, opts) => {
     body: {
       files,
       functions,
-      async: Object.keys(files).length > opts.syncFileLimit,
-      branch: opts.branch,
-      draft: opts.draft,
+      async: Object.keys(files).length > syncFileLimit,
+      branch,
+      draft,
     },
   })
-  if (opts.deployId === null) {
+  if (deployIdOpt === null) {
     if (title) {
       deployParams = { ...deployParams, title }
     }
     deploy = await api.createSiteDeploy(deployParams)
   } else {
-    deployParams = { ...deployParams, deploy_id: opts.deployId }
+    deployParams = { ...deployParams, deploy_id: deployIdOpt }
     deploy = await api.updateSiteDeploy(deployParams)
   }

-  if (deployParams.body.async) deploy = await waitForDiff(api, deploy.id, siteId, opts.deployTimeout)
+  if (deployParams.body.async) deploy = await waitForDiff(api, deploy.id, siteId, deployTimeout)

   const { id: deployId, required: requiredFiles, required_functions: requiredFns } = deploy

@@ -111,22 +113,22 @@ const deploySite = async (api, siteId, dir, opts) => {

   const uploadList = getUploadList(requiredFiles, filesShaMap).concat(getUploadList(requiredFns, fnShaMap))

-  await uploadFiles(api, deployId, uploadList, opts)
+  await uploadFiles(api, deployId, uploadList, { concurrentUpload, statusCb, maxRetry })

   statusCb({
     type: 'wait-for-deploy',
     msg: 'Waiting for deploy to go live...',
     phase: 'start',
   })
-  deploy = await waitForDeploy(api, deployId, siteId, opts.deployTimeout)
+  deploy = await waitForDeploy(api, deployId, siteId, deployTimeout)

   statusCb({
     type: 'wait-for-deploy',
-    msg: opts.draft ? 'Draft deploy is live!' : 'Deploy is live!',
+    msg: draft ? 'Draft deploy is live!' : 'Deploy is live!',
     phase: 'stop',
   })

-  await rimraf(opts.tmpDir)
+  await rimraf(tmpDir)

   const deployManifest = {
     deployId,

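For reference, the new destructured signature is consumed roughly as follows. This is a hedged usage sketch: api, siteId, and the directory names are placeholders, and deploySite is assumed to be imported from this module.

const runDeploy = async (api, siteId) =>
  deploySite(api, siteId, 'dist', {
    fnDir: 'functions',
    draft: true,
    // statusCb receives { type, msg, phase } objects, as documented in the signature comments
    statusCb: ({ type, msg, phase }) => console.log(`[${type}] ${phase}: ${msg}`),
  })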
Review comment: concurrentHash and statusCb default values are assigned by the calling function.
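In other words, deploySite owns the defaults (DEFAULT_CONCURRENT_HASH, the noop statusCb) and passes them down, while the lower-level helpers only validate what they receive. A simplified sketch of that layering, with hypothetical names rather than the module code:

const DEFAULT_CONCURRENT_HASH = 100

// helper: no defaults of its own, just validation of required options
const hashHelper = ({ concurrentHash, statusCb }) => {
  if (!concurrentHash || !statusCb) throw new Error('Missing required opts')
  statusCb({ type: 'hashing', msg: `concurrency ${concurrentHash}`, phase: 'start' })
}

// entry point: assigns the defaults and forwards them unchanged
const entryPoint = ({ concurrentHash = DEFAULT_CONCURRENT_HASH, statusCb = () => {} } = {}) =>
  hashHelper({ concurrentHash, statusCb })

entryPoint() // defaults applied by the caller
entryPoint({ concurrentHash: 5 }) // overrides flow down unchanged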