diff --git a/Dockerfile b/Dockerfile
deleted file mode 100644
index 7e75064e..00000000
--- a/Dockerfile
+++ /dev/null
@@ -1,14 +0,0 @@
-FROM node:10
-
-COPY ./src/cli/bin.js /app/src/cli/bin.js
-COPY ./package.json /app/package.json
-
-RUN cd app && npm install && npm link
-
-COPY ./src /app/src
-
-EXPOSE 50321
-
-RUN export STORE_S3_PATH=$HOSTNAME
-
-CMD ipfs-npm
diff --git a/README.md b/README.md
index 1a25f795..50bb3129 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,8 @@
-npm on IPFS
-===========
+
+
+
-
+# npm on IPFS
[](https://protocol.ai)
[](http://ipfs.io/)
@@ -12,7 +13,11 @@ npm on IPFS
> Install your favourite modules from the Distributed Web using IPFS. Have a cache always ready and share them in all your local networks.
-# Resources
+
+
+
+
+## Resources
- [The original demo video](https://vimeo.com/147968322)
- [Lengthy introduction in a blog post](http://daviddias.me/blog/stellar-module-management/)
@@ -22,8 +27,6 @@ npm on IPFS
[Alex Potsides](https://github.com/achingbrain)
-# Quick setup (probably all that you need)
-
## Install this module
```bash
@@ -32,106 +35,60 @@ npm on IPFS
# Usage
-Wait for the `Server running` message:
-
-```bash
-$ docker run ipfs-npm
-📦 Mirroring npm on localhost:50321
-😈 Using in-process IPFS daemon
-Swarm listening on /ip4/127.0.0.1/tcp/4003/ws/ipfs/Qm...
-Swarm listening on /ip4/127.0.0.1/tcp/4002/ipfs/Qm...
-Swarm listening on /ip4/172.17.0.2/tcp/4002/ipfs/Qm...
-🚀 Server running
-🔧 Please either update your npm config with 'npm config set registry http://localhost:50321'
-🔧 or use the '--registry' flag, eg: 'npm install --registry=http://localhost:50321'
-```
-
-Port `50321` is default and can be set with `--port`.
+`ipfs-npm` wraps your chosen package manager (e.g. npm or yarn) with configuration to use IPFS to retrieve your dependencies instead of over HTTP from the central npm registry.
-## Configure npm
-
-Set up your npm to use `ipfs-npm` with the default port through:
+In the directory with your `package.json` file, run:
```bash
-$ npm config set registry http://localhost:50321
+$ ipfs-npm install
+👿 Spawning an in-process IPFS node
+👂 Loading registry index from https://registry.js.ipfs.io
+☎️ Dialling registry mirror /ip4/127.0.0.1/tcp/40020/ipfs/QmeXyYCLSivUn5Ju31jjPBYNKdncbzzEf6zdN2DyrdLAbe
+🗑️ Replacing old registry index if it exists
+📠 Copying registry index /ipfs/QmQmVsNFw3stJky7agrETeB9kZqkcvLSLRnFFMrhiR8zG1 to /npm-registry
+👩🚀 Starting local proxy
+🚀 Server running on port 57314
+🎁 Installing dependencies with /Users/alex/.nvm/versions/node/v10.8.0/bin/npm
+...
```
-If you picked another `--port` you need to adjust accordingly.
-
-Good to npm install away! :)
-
-# Usage
-
## CLI
```bash
$ ipfs-npm --help
ipfs-npm
-Starts a registry server that uses IPFS to fetch js dependencies
+Installs your js dependencies using IPFS
Options:
- --help Show help [boolean]
- --version Show version number [boolean]
- --clone Whether to clone the registry in the background
- [default: true]
- --eager-download Whether to eagerly download tarballs
- [default: true]
- --mirror-host Which host to listen to requests on
- [default: "localhost"]
- --mirror-port Which port to listen to requests on
- [default: 50321]
- --mirror-protocol Which protocol to use with the server
- [default: "http"]
- --mirror-registry Where to download missing files from/proxy for
- non-get requests
- [default: "https://registry.npmjs.com"]
- --mirror-upload-size-limit How large a file upload to allow when proxying for
- the registry [default: "1024MB"]
- --ipfs-port Which port the daemon is listening on
- [default: null]
- --external-host Which host to use when reaching this mirror
- --external-port Which port to use when reaching this mirror
- --external-protocol Which protocol to use when reaching this mirror
- --ipfs-host Which host the daemon is listening on
- [default: "localhost"]
- --ipfs-base-dir Which mfs prefix to use
- [default: "/commons-registry"]
- --ipfs-flush Whether to flush the MFS cache [default: true]
- --ipfs-max-requests How many concurrent requests to make to the IPFS
- daemon [default: 5]
- --ipfs-type "proc" to start an in process node, "go" or "js"
- to connect to a remote daemon (in conjunction with
- --ipfs-port and --ipfs-host). [default: "proc"]
- --clone-skim Which registry to clone
- [default: "https://replicate.npmjs.com/registry"]
- --clone-user-agent What user agent to specify when contacting the
- registry [default: "IPFS registry-mirror worker"]
- --clone-delay How long in ms to wait between cloning each module
+ --help Show help [boolean]
+ --version Show version number [boolean]
+ --package-manager Which package manager to use - eg. npm or yarn
+ [default: "npm"]
+ --ipfs-registry Where to download any packages that haven't made
+ it into the registry index yet from
+ [default: "https://registry.js.ipfs.io"]
+ --registry-upload-size-limit How large a file upload to allow when proxying
+ for the registry [default: "1024MB"]
+ --registry-update-interval Only request the manifest for a given module
+ every so many ms [default: 60000]
+ --ipfs-mfs-prefix Which mfs prefix to use
+ [default: "/npm-registry"]
+ --ipfs-node "proc" to start an in-process IPFS node, "go" or
+ "js" to spawn an IPFS node as a separate process
+ or a multiaddr that resolves to a running node
+ [default: "proc"]
+ --request-max-sockets How many concurrent http requests to make while
+ cloning the repo [default: 10]
+ --request-retries How many times to retry when downloading
+ manifests and tarballs from the registry
+ [default: 5]
+ --request-retry-delay How long in ms to wait between retries
[default: 1000]
- --clone-upgrade-to-https If a tarball is specifed with an http URL, whether
- to upgrade it to https [default: true]
- --request-max-sockets How many concurrent http requests to make while
- cloning the repo [default: 10]
+ --request-timeout How long in ms we should wait when requesting
+ files [default: 30000]
```
-## Docker
-
-```
-$ docker-compose build
-$ docker-compose up -d --scale registry=4
-```
-
-### Upgrading
-
-```
-$ ./deploy.sh
-```
-
-## Important
-
-If you are on Mac OS X, make sure to increase the limit of files open (with `ulimit -Sn 4096`), otherwise the ipfs daemon will be sad and throw 502 replies.
-
# Acknowledgements
This module takes a lot of inspiration from [reginabox](https://www.npmjs.com/package/reginabox). Big thank you to everyone that contributed with code or to the [discussion](https://github.com/ipfs/notes/issues/2) to make this happen.
diff --git a/conf/proxy.conf b/conf/proxy.conf
deleted file mode 100644
index 9fd1c39d..00000000
--- a/conf/proxy.conf
+++ /dev/null
@@ -1,21 +0,0 @@
-# HTTP 1.1 support
-proxy_http_version 1.1;
-proxy_buffering off;
-proxy_set_header Host $http_host;
-proxy_set_header Upgrade $http_upgrade;
-proxy_set_header Connection $proxy_connection;
-proxy_set_header X-Real-IP $remote_addr;
-proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
-proxy_set_header X-Forwarded-Proto $proxy_x_forwarded_proto;
-proxy_set_header X-Forwarded-Ssl $proxy_x_forwarded_ssl;
-proxy_set_header X-Forwarded-Port $proxy_x_forwarded_port;
-
-# Mitigate httpoxy attack (see README for details)
-proxy_set_header Proxy "";
-
-# Increase proxy timeouts
-proxy_connect_timeout 75s;
-proxy_send_timeout 60s;
-proxy_read_timeout 3600s;
-
-client_max_body_size 1024m;
diff --git a/deploy.sh b/deploy.sh
deleted file mode 100755
index 85115503..00000000
--- a/deploy.sh
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/bin/bash -eux
-
-# Get the latest
-git pull
-
-# Build a Docker image
-docker-compose build --no-cache registry
-
-# Shut down the registry containers
-docker-compose stop registry
-
-# Restart using the new image
-docker-compose up -d --no-deps --force-recreate --scale registry=5 registry
diff --git a/docker-compose.yml b/docker-compose.yml
deleted file mode 100644
index 261c1197..00000000
--- a/docker-compose.yml
+++ /dev/null
@@ -1,45 +0,0 @@
-version: '2'
-
-services:
-
- proxy:
- image: jwilder/nginx-proxy:alpine
- links:
- - registry
- ports:
- - '80:80'
- - '443:443'
- volumes:
- - /var/run/docker.sock:/tmp/docker.sock:ro
- - /etc/nginx/vhost.d
- - /usr/share/nginx/html
- - /etc/nginx/certs
- - ./conf/proxy.conf:/etc/nginx/proxy.conf
- restart: 'always'
-
- letsencrypt-nginx-proxy-companion:
- image: jrcs/letsencrypt-nginx-proxy-companion
- volumes:
- - /var/run/docker.sock:/var/run/docker.sock:ro
- volumes_from:
- - proxy
-
- registry:
- build: .
- restart: 'always'
- environment:
- - VIRTUAL_HOST=registry.js.ipfs.io
- - VIRTUAL_PORT=50321
- - LETSENCRYPT_HOST=registry.js.ipfs.io
- - LETSENCRYPT_EMAIL=alex@achingbrain.net
- - CLONE_EAGER_DOWNLOAD=false
- - CLONE=false
- - EXTERNAL_PROTOCOL=https
- - EXTERNAL_HOST=registry.js.ipfs.io
- - EXTERNAL_PORT=443
- - IPFS_FLUSH=true
- - STORE_TYPE=s3
- - STORE_S3_REGION=${STORE_S3_REGION}
- - STORE_S3_BUCKET=${STORE_S3_BUCKET}
- - STORE_S3_ACCESS_KEY_ID=${STORE_S3_ACCESS_KEY_ID}
- - STORE_S3_SECRET_ACCESS_KEY=${STORE_S3_SECRET_ACCESS_KEY}
diff --git a/img/npm-on-ipfs.svg b/img/npm-on-ipfs.svg
index ee845650..a9aee11e 100644
--- a/img/npm-on-ipfs.svg
+++ b/img/npm-on-ipfs.svg
@@ -1,5 +1,5 @@
-
+
npm-on-ipfs
Original idea by @olizilla.
diff --git a/package.json b/package.json
index 1b94c0b6..2c9653d2 100644
--- a/package.json
+++ b/package.json
@@ -1,11 +1,10 @@
{
"name": "ipfs-npm",
"version": "0.10.0",
- "description": "Set up a NPM registry mirror, using your favourite storage, IPFS! :D",
+ "description": "Install your npm dependencies using IPFS and the distributed web! :D",
"leadMaintainer": "Alex Potsides ",
- "main": "src/index.js",
+ "main": "src/cli/bin.js",
"bin": {
- "registry-mirror": "src/cli/bin.js",
"ipfs-npm": "src/cli/bin.js"
},
"scripts": {
@@ -13,7 +12,7 @@
"test:node": "aegir test -t node",
"coverage": "aegir coverage",
"lint": "aegir lint",
- "start": "CLONE_EAGER_DOWNLOAD=false CLONE=false IPFS_FLUSH=true registry-mirror",
+ "start": "node .",
"release": "aegir release -t node",
"release-minor": "aegir release --type minor -t node",
"release-major": "aegir release --type major -t node"
@@ -34,19 +33,20 @@
},
"homepage": "https://github.com/ipfs-shipyard/npm-on-ipfs#readme",
"dependencies": {
- "aws-sdk": "^2.325.0",
- "datastore-s3": "~0.2.0",
+ "@yarnpkg/lockfile": "^1.1.0",
"debug": "^4.0.1",
"dnscache": "^1.0.1",
- "express": "^4.16.3",
+ "express": "^4.16.4",
"express-http-proxy": "^1.4.0",
- "express-prom-bundle": "^4.2.1",
- "follow-registry": "achingbrain/follow-registry",
- "hat": "~0.0.3",
- "ipfs": "~0.32.2",
+ "ipfs": "~0.32.3",
+ "ipfs-api": "^25.0.0",
+ "ipfs-registry-mirror-common": "^1.0.0",
+ "ipfsd-ctl": "~0.39.2",
"once": "^1.4.0",
+ "output-buffer": "^1.2.0",
"request": "^2.88.0",
"request-promise": "^4.2.2",
+ "which-promise": "^1.0.0",
"yargs": "^12.0.1"
},
"devDependencies": {
@@ -54,8 +54,8 @@
"chai": "^4.1.2",
"dirty-chai": "^2.0.1",
"mock-require": "^3.0.2",
- "promise-delay": "^2.1.0",
- "sinon": "^6.1.5"
+ "npm-run-all": "^4.1.3",
+ "sinon": "^7.0.0"
},
"contributors": [
"Danny Arnold ",
diff --git a/src/cli/bin.js b/src/cli/bin.js
index a140c25d..b6744713 100755
--- a/src/cli/bin.js
+++ b/src/cli/bin.js
@@ -5,43 +5,23 @@
require('dnscache')({ enable: true })
const pkg = require('../../package')
-const path = require('path')
process.title = pkg.name
const yargs = require('yargs')
-yargs.command('$0', 'Starts a registry server that uses IPFS to fetch js dependencies', (yargs) => { // eslint-disable-line no-unused-expressions
+yargs.command('$0', 'Installs your js dependencies using IPFS', (yargs) => { // eslint-disable-line no-unused-expressions
yargs
- .option('clone', {
- describe: 'Whether to clone the registry in the background',
- default: true
- })
- .option('replicate', {
- describe: 'Whether to replicate the registry in the background',
- default: true
- })
- .option('eager-download', {
- describe: 'Whether to eagerly download tarballs',
- default: true
+ .option('package-manager', {
+ describe: 'Which package manager to use - eg. npm or yarn',
+ default: 'npm'
})
- .option('mirror-host', {
- describe: 'Which host to listen to requests on',
- default: 'localhost'
- })
- .option('mirror-port', {
- describe: 'Which port to listen to requests on',
- default: 50321
- })
- .option('mirror-protocol', {
- describe: 'Which protocol to use with the server',
- default: 'http'
- })
- .option('mirror-registry', {
- describe: 'Where to download missing files from/proxy for non-get requests',
- default: 'https://registry.npmjs.com'
+
+ .option('ipfs-registry', {
+ describe: 'Where to download any packages that haven\'t made it into the registry index yet from',
+ default: 'https://registry.js.ipfs.io'
})
- .option('mirror-upload-size-limit', {
+ .option('registry-upload-size-limit', {
describe: 'How large a file upload to allow when proxying for the registry',
default: '1024MB'
})
@@ -49,63 +29,20 @@ yargs.command('$0', 'Starts a registry server that uses IPFS to fetch js depende
describe: 'Only request the manifest for a given module every so many ms',
default: 60000
})
- .option('ipfs-port', {
- describe: 'Which port the daemon is listening on',
- default: null
- })
- .option('external-host', {
- describe: 'Which host to use when reaching this mirror'
- })
- .option('external-port', {
- describe: 'Which port to use when reaching this mirror'
- })
- .option('external-protocol', {
- describe: 'Which protocol to use when reaching this mirror'
- })
- .option('ipfs-host', {
- describe: 'Which host the daemon is listening on',
- default: 'localhost'
- })
- .option('ipfs-base-dir', {
+
+ .option('ipfs-mfs-prefix', {
describe: 'Which mfs prefix to use',
- default: '/commons-registry'
- })
- .option('ipfs-flush', {
- describe: 'Whether to flush the MFS cache',
- default: true
+ default: '/npm-registry'
})
- .option('ipfs-max-requests', {
- describe: 'How many concurrent requests to make to the IPFS daemon',
- default: 5
- })
- .option('ipfs-type', {
- describe: '"proc" to start an in process node, "go" or "js" to connect to a remote daemon (in conjunction with --ipfs-port and --ipfs-host).',
+ .option('ipfs-node', {
+ describe: '"proc" to start an in-process IPFS node, "go" or "js" to spawn an IPFS node as a separate process or a multiaddr that resolves to a running node',
default: 'proc'
})
- .option('ipfs-repo', {
- describe: 'The path to the IPFS repo you wish to use',
- default: path.join(process.env.HOME, '.jsipfs')
- })
- .option('clone-skim', {
- describe: 'Which skimdb to follow',
- default: 'https://replicate.npmjs.com/registry'
- })
- .option('clone-registry', {
- describe: 'Which registry to clone',
- default: 'replicate.npmjs.com/registry'
- })
- .option('clone-user-agent', {
- describe: 'What user agent to specify when contacting the registry',
- default: 'IPFS registry-mirror worker'
- })
- .option('clone-delay', {
- describe: 'How long in ms to wait between cloning each module',
- default: 1000
- })
- .option('clone-upgrade-to-https', {
- describe: 'If a tarball is specifed with an http URL, whether to upgrade it to https',
+ .option('ipfs-flush', {
+ describe: 'Whether to flush the MFS cache',
default: true
})
+
.option('request-max-sockets', {
describe: 'How many concurrent http requests to make while cloning the repo',
default: 10
@@ -122,28 +59,5 @@ yargs.command('$0', 'Starts a registry server that uses IPFS to fetch js depende
describe: 'How long in ms we should wait when requesting files',
default: 30000
})
- .option('store-type', {
- describe: 'Which type of datastore to use - fs, s3, etc',
- default: 'fs'
- })
- .option('store-s3-region', {
- describe: 'The s3 region to use'
- })
- .option('store-s3-bucket', {
- describe: 'The s3 bucket to use'
- })
- .option('store-s3-path', {
- describe: 'The path to use in an s3 bucket'
- })
- .option('store-s3-access-key-id', {
- describe: 'The s3 access key id to use'
- })
- .option('store-s3-secret-access-key', {
- describe: 'The s3 secret access key id to use'
- })
- .option('store-s3-create-if-missing', {
- describe: 'Whether to create the bucket if it is missing',
- default: false
- })
}, require('../core'))
.argv
diff --git a/src/core/clone/index.js b/src/core/clone/index.js
deleted file mode 100644
index ecd93357..00000000
--- a/src/core/clone/index.js
+++ /dev/null
@@ -1,97 +0,0 @@
-'use strict'
-
-const follow = require('follow-registry')
-const once = require('once')
-const EventEmitter = require('events').EventEmitter
-const log = require('debug')('ipfs:registry-mirror:clone')
-const replaceTarballUrls = require('../utils/replace-tarball-urls')
-const saveManifest = require('../utils/save-manifest')
-const saveTarballs = require('./save-tarballs')
-
-const emitter = new EventEmitter()
-
-const add = async (options, pkg, ipfs, emitter) => {
- log(`Adding ${pkg.name}`)
-
- pkg = replaceTarballUrls(options, pkg)
-
- saveManifest(pkg, ipfs, options)
- .then(() => {
- if (options.clone.eagerDownload) {
- log(`Eagerly downloading tarballs for ${pkg.name}`)
-
- saveTarballs(options, pkg, ipfs, emitter)
- .then(() => {
- log(`Added ${pkg.name}`)
-
- emitter.emit('processed', pkg)
- })
- .catch(error => {
- log(`Error adding tarballs for ${pkg.name} - ${error.stack}`)
- emitter.emit('error', error)
- })
- } else {
- log(`Not eagerly downloading tarballs for ${pkg.name}`)
-
- emitter.emit('processed', pkg)
- }
- })
- .catch(error => {
- log(`Error adding manifest for ${pkg.name} - ${error.stack}`)
- emitter.emit('error', error)
- })
-
- return pkg
-}
-
-module.exports = (options, ipfs) => {
- console.info('🦎 Replicating registry...')
-
- follow({
- ua: options.clone.userAgent,
- skim: options.clone.skim,
- registry: options.clone.registry,
- handler: async (data, callback) => {
- if (!data.json || !data.json.name) {
- return callback() // Bail, something is wrong with this change
- }
-
- console.info(`🎉 Updated version of ${data.json.name} received`)
-
- callback = once(callback)
-
- const mfsPath = `${options.store.baseDir}/${data.json.name}`
-
- const mfsVersion = await ipfs.files.read(mfsPath)
- .then(buffer => JSON.parse(buffer))
- .catch(error => {
- if (error.message.includes('file does not exist')) {
- log(`${mfsPath} not in MFS`)
- } else {
- log(`Could not read ${mfsPath}`, error)
- }
-
- return {}
- })
-
- // save our existing versions so we don't re-download tarballs we already have
- Object.keys(mfsVersion.versions || {}).forEach(versionNumber => {
- data.json.versions[versionNumber] = mfsVersion.versions[versionNumber]
- })
-
- add(options, data.json, ipfs, emitter)
- .then(() => {
- console.log(`🦕 [${data.seq}] processed ${data.json.name}`)
- })
- .catch((error) => {
- log(error)
- console.error(`💥 [${data.seq}] error processing ${data.json.name} - ${error}`)
- })
- .then(() => {
- setTimeout(() => callback(), options.clone.delay)
- })
- }
- })
-
- return emitter
-}
diff --git a/src/core/clone/pool.js b/src/core/clone/pool.js
deleted file mode 100644
index 75c7ee77..00000000
--- a/src/core/clone/pool.js
+++ /dev/null
@@ -1,23 +0,0 @@
-'use strict'
-
-const createPool = require('../utils/create-pool')
-
-// shared executor pool so as to not overwhelm the ipfs daemon
-let pool
-
-const getPool = (options) => {
- if (!pool) {
- let concurrency = 100
-
- if (options.ipfs.port) {
- // do not overload a remote IPFS daemon
- concurrency = options.clone.maxRequests
- }
-
- pool = createPool(concurrency)
- }
-
- return pool
-}
-
-module.exports = getPool
diff --git a/src/core/clone/save-tarball.js b/src/core/clone/save-tarball.js
deleted file mode 100644
index 815f18f2..00000000
--- a/src/core/clone/save-tarball.js
+++ /dev/null
@@ -1,112 +0,0 @@
-'use strict'
-
-const log = require('debug')('ipfs:registry-mirror:replicate:save-tarball')
-const request = require('../utils/retry-request')
-const CID = require('cids')
-const crypto = require('crypto')
-const loadManifest = require('../utils/load-manifest')
-const saveManifest = require('../utils/save-manifest')
-const {
- PassThrough
-} = require('stream')
-
-const saveTarball = (options, packageName, versionNumber, ipfs, emitter) => {
- const outputStream = new PassThrough()
-
- loadManifest(options, ipfs, packageName)
- .then(async (manifest) => {
- const version = manifest.versions[versionNumber]
-
- validate(version, versionNumber, packageName)
-
- if (version.dist.cid) {
- log(`Skipping version ${versionNumber} of ${packageName} - already downloaded`)
- outputStream.end()
-
- return
- }
-
- const startTime = Date.now()
- const cid = await downloadFile(options, ipfs, version.dist.source, version.dist.shasum, outputStream)
-
- console.info(`🏄♀️ Added ${version.dist.source} with hash ${cid} in ${Date.now() - startTime}ms`)
-
- await updateCid(options, ipfs, packageName, versionNumber, cid)
- })
- .catch(error => {
- console.error(`💥 Error storing tarball ${packageName} ${versionNumber} - ${error.stack}`)
- })
-
- return outputStream
-}
-
-const validate = (version, versionNumber, packageName) => {
- if (!version) {
- throw new Error(`Skipping invalid version ${versionNumber} of ${packageName} - version not in manifest`)
- }
-
- if (!version.dist) {
- throw new Error(`Skipping invalid version ${versionNumber} of ${packageName} - no dist section`)
- }
-
- if (!version.dist.source) {
- throw new Error(`Skipping invalid version ${versionNumber} of ${packageName} - no source`)
- }
-
- if (!version.dist.shasum) {
- throw new Error(`Skipping invalid version ${versionNumber} of ${packageName} - no shasum`)
- }
-}
-
-const updateCid = async (options, ipfs, packageName, versionNumber, cid) => {
- while (true) {
- let manifest = await loadManifest(options, ipfs, packageName)
- manifest.versions[versionNumber].dist.cid = cid
-
- await saveManifest(manifest, ipfs, options)
-
- manifest = await loadManifest(options, ipfs, packageName)
-
- if (manifest.versions[versionNumber].dist.cid === cid) {
- return
- }
- }
-}
-
-const downloadFile = async (options, ipfs, url, shasum, outputStream) => {
- log(`Downloading ${url}`)
-
- const hash = crypto.createHash('sha1')
- hash.setEncoding('hex')
- hash.on('error', () => {})
-
- return request(Object.assign({}, options.request, {
- uri: url
- }))
- .then(stream => {
- stream.pipe(outputStream)
- stream.pipe(hash)
-
- return ipfs.files.add(stream, {
- wrapWithDirectory: false
- })
- })
- .then(files => {
- const result = hash.read()
-
- if (result !== shasum) {
- // we've already piped to the client at this point so can't retry the download
- // abort saving the CID of the corrupted download to our copy of the manifest
- // instead so we retry next time it's requested
- throw new Error(`File downloaded from ${url} had invalid shasum ${result} - expected ${shasum}`)
- }
-
- log(`File downloaded from ${url} had shasum ${result} - matched ${shasum}`)
-
- const file = files.pop()
-
- return new CID(file.hash).toV1().toBaseEncodedString('base32')
- })
-}
-
-module.exports = saveTarball
diff --git a/src/core/clone/save-tarballs.js b/src/core/clone/save-tarballs.js
deleted file mode 100644
index b9e78926..00000000
--- a/src/core/clone/save-tarballs.js
+++ /dev/null
@@ -1,32 +0,0 @@
-'use strict'
-
-const getPool = require('./pool')
-const saveTarball = require('./save-tarball')
-
-const saveTarballs = async (options, pkg, ipfs, emitter) => {
- const pool = getPool(options)
-
- return Promise.all(
- Object.keys(pkg.versions || {})
- .map(versionNumber => {
- let version = pkg.versions[versionNumber]
-
- const fn = () => {
- return new Promise((resolve, reject) => {
- const stream = saveTarball(options, pkg.name, versionNumber, ipfs, emitter)
- stream.once('finish', () => resolve())
- stream.once('error', (error) => reject(error))
- })
- }
-
- // set an id on our tasks to make sure we don't queue two downloads for the same file
- // this is used by the pool to spot duplicate tasks
- fn.id = version.dist.source
-
- return fn
- })
- .map(fn => pool.addTask(fn))
- )
-}
-
-module.exports = saveTarballs
diff --git a/src/core/config.js b/src/core/config.js
index 3c417e14..fc739fab 100644
--- a/src/core/config.js
+++ b/src/core/config.js
@@ -1,74 +1,27 @@
'use strict'
-const toBoolean = (value) => {
- if (value === undefined) {
- return undefined
- }
-
- if (value === 'false' || value === '0' || value === 'no') {
- return false
- }
-
- if (value === 'true' || value === '1' || value === 'yes') {
- return true
- }
-
- return Boolean(value)
-}
-
-function option () {
- for (let i = 0; i < arguments.length; i++) {
- const arg = arguments[i]
-
- if (arg !== undefined && arg !== null && arg.toString() !== 'NaN') {
- return arg
- }
- }
-}
+const toBoolean = require('ipfs-registry-mirror-common/utils/to-boolean')
+const option = require('ipfs-registry-mirror-common/utils/option')
module.exports = (overrides = {}) => {
return {
- mirror: {
- host: option(process.env.MIRROR_HOST, overrides.mirrorHost),
- port: option(process.env.MIRROR_PORT, overrides.mirrorPort),
- protocol: option(process.env.MIRROR_PROTOCOL, overrides.mirrorProtocol),
- registry: option(process.env.MIRROR_REGISTRY, overrides.mirrorRegistry),
- uploadSizeLimit: option(process.env.MIRROR_UPLOAD_SIZE_LIMIT, overrides.mirrorUploadSizeLimit),
- registryUpdateInterval: option(process.env.REGISTRY_UPDATE_INTERVAL, overrides.registryUpdateInterval)
- },
- external: {
- host: option(process.env.EXTERNAL_HOST, overrides.externalHost),
- port: option(process.env.EXTERNAL_PORT, overrides.externalPort),
- protocol: option(process.env.EXTERNAL_PROTOCOL, overrides.externalProtocol)
- },
+ packageManager: option(process.env.PACKAGE_MANAGER, overrides.packageManager),
+ registry: option(process.env.IPFS_REGISTRY, overrides.ipfsRegistry),
+ registryUpdateInterval: option(Number(process.env.REGISTRY_UPDATE_INTERVAL), overrides.registryUpdateInterval),
+ registryUploadSizeLimit: option(process.env.MIRROR_UPLOAD_SIZE_LIMIT, overrides.registryUploadSizeLimit),
+
ipfs: {
- port: option(process.env.IPFS_PORT, overrides.ipfsPort),
host: option(process.env.IPFS_HOST, overrides.ipfsHost),
- repo: option(process.env.IPFS_REPO, overrides.ipfsRepo)
- },
- store: {
- type: option(process.env.STORE_TYPE, overrides.storeType),
- baseDir: option(process.env.IPFS_BASE_DIR, overrides.ipfsBaseDir),
- flush: option(toBoolean(process.env.IPFS_FLUSH), overrides.ipfsFlush),
- s3: {
- region: option(process.env.STORE_S3_REGION, overrides.storeS3Region),
- bucket: option(process.env.STORE_S3_BUCKET, overrides.storeS3Bucket),
- path: option(process.env.STORE_S3_PATH, overrides.storeS3Path),
- accessKeyId: option(process.env.STORE_S3_ACCESS_KEY_ID, overrides.storeS3AccessKeyId),
- secretAccessKey: option(process.env.STORE_S3_SECRET_ACCESS_KEY, overrides.storeS3SecretAccessKey),
- createIfMissing: option(process.env.STORE_S3_CREATE_IF_MISSING, overrides.createIfMissing)
- }
+ port: option(Number(process.env.IPFS_PORT), overrides.ipfsPort),
+ node: option(process.env.IPFS_NODE, overrides.ipfsNode),
+ prefix: option(process.env.IPFS_MFS_PREFIX, overrides.ipfsMfsPrefix),
+ flush: option(process.env.IPFS_FLUSH, overrides.ipfsFlush)
},
- clone: {
- enabled: option(toBoolean(process.env.CLONE), overrides.clone),
- delay: option(process.env.CLONE_DELAY, overrides.cloneDelay),
- registry: option(process.env.CLONE_REGISTRY_URL, overrides.cloneRegistry),
- skim: option(process.env.CLONE_SKIM_URL, overrides.cloneSkim),
- upgradeToHttps: option(toBoolean(process.env.CLONE_UPGRADE_TO_HTTPS), overrides.cloneUpgradeToHttps),
- eagerDownload: option(toBoolean(process.env.CLONE_EAGER_DOWNLOAD), overrides.eagerDownload),
- userAgent: option(process.env.CLONE_USER_AGENT, overrides.cloneUserAgent),
- maxRequests: option(Number(process.env.IPFS_MAX_REQUESTS), overrides.ipfsMaxRequests)
+
+ http: {
+ host: 'localhost'
},
+
request: {
pool: {
maxSockets: option(Number(process.env.REQUEST_MAX_SOCKETS), overrides.requestMaxSockets)
diff --git a/src/core/handlers/favicon.js b/src/core/handlers/favicon.js
deleted file mode 100644
index ef23fafe..00000000
--- a/src/core/handlers/favicon.js
+++ /dev/null
@@ -1,12 +0,0 @@
-'use strict'
-
-const fs = require('fs')
-const path = require('path')
-
-module.exports = (options, ipfs, app) => {
- return async (request, response, next) => {
- fs.createReadStream(path.join(__dirname, 'favicon.png'))
- .on('error', () => {})
- .pipe(response)
- }
-}
diff --git a/src/core/handlers/favicon.png b/src/core/handlers/favicon.png
deleted file mode 100644
index b5dc53c5..00000000
Binary files a/src/core/handlers/favicon.png and /dev/null differ
diff --git a/src/core/handlers/index.js b/src/core/handlers/index.js
deleted file mode 100644
index c4868a2f..00000000
--- a/src/core/handlers/index.js
+++ /dev/null
@@ -1,8 +0,0 @@
-'use strict'
-
-module.exports = {
- tarball: require('./tarball'),
- manifest: require('./manifest'),
- favicon: require('./favicon'),
- root: require('./root')
-}
diff --git a/src/core/handlers/manifest.js b/src/core/handlers/manifest.js
index dd8e0d71..2ab20964 100644
--- a/src/core/handlers/manifest.js
+++ b/src/core/handlers/manifest.js
@@ -1,11 +1,11 @@
'use strict'
-const log = require('debug')('ipfs:registry-mirror:handlers:manifest')
-const loadManifest = require('../utils/load-manifest')
-const sanitiseName = require('../utils/sanitise-name')
-const lol = require('../utils/error-message')
+const log = require('debug')('ipfs:ipfs-npm:handlers:manifest')
+const loadManifest = require('ipfs-registry-mirror-common/utils/load-manifest')
+const sanitiseName = require('ipfs-registry-mirror-common/utils/sanitise-name')
+const lol = require('ipfs-registry-mirror-common/utils/error-message')
-module.exports = (options, ipfs, app) => {
+module.exports = (config, ipfs, app) => {
return async (request, response, next) => {
log(`Requested ${request.path}`)
@@ -14,13 +14,13 @@ module.exports = (options, ipfs, app) => {
log(`Loading manifest for ${moduleName}`)
try {
- const manifest = await loadManifest(options, ipfs, moduleName)
+ const manifest = await loadManifest(config, ipfs, moduleName)
response.statusCode = 200
response.setHeader('Content-type', 'application/json; charset=utf-8')
response.send(JSON.stringify(manifest, null, request.query.format === undefined ? 0 : 2))
} catch (error) {
- console.error(`💥 Could not load manifest for ${moduleName}`, error)
+ console.error(`💥 Could not load manifest for ${moduleName}`, error) // eslint-disable-line no-console
if (error.message.includes('Not found')) {
response.statusCode = 404
diff --git a/src/core/handlers/root.js b/src/core/handlers/root.js
index bb782da2..f9d4bd63 100644
--- a/src/core/handlers/root.js
+++ b/src/core/handlers/root.js
@@ -2,31 +2,15 @@
const pkg = require('../../../package.json')
-module.exports = (options, ipfs, app) => {
+module.exports = () => {
return async (request, response, next) => {
- const [
- id,
- peers
- ] = await Promise.all([
- request.app.locals.ipfs.id(),
- request.app.locals.ipfs.swarm.addrs()
- ])
+ const info = {
+ name: pkg.name,
+ version: pkg.version
+ }
response.statusCode = 200
response.setHeader('Content-type', 'application/json; charset=utf-8')
- response.send(JSON.stringify({
- name: pkg.name,
- version: pkg.version,
- ipfs: id,
- peers: peers.map(peer => {
- const info = peer.id.toJSON()
-
- return {
- id: info.id,
- publicKey: info.pubKey,
- addresses: peer.multiaddrs.toArray().map(multiaddr => multiaddr.toString())
- }
- })
- }, null, request.query.format === undefined ? 0 : 2))
+ response.send(JSON.stringify(info, null, request.query.format === undefined ? 0 : 2))
}
}
diff --git a/src/core/handlers/tarball.js b/src/core/handlers/tarball.js
index a5698643..c316d6b6 100644
--- a/src/core/handlers/tarball.js
+++ b/src/core/handlers/tarball.js
@@ -1,11 +1,11 @@
'use strict'
-const log = require('debug')('ipfs:registry-mirror:handlers:tarball')
+const log = require('debug')('ipfs:ipfs-npm:handlers:tarball')
const path = require('path')
-const loadTarball = require('../utils/load-tarball')
-const lol = require('../utils/error-message')
+const loadTarball = require('ipfs-registry-mirror-common/utils/load-tarball')
+const lol = require('ipfs-registry-mirror-common/utils/error-message')
-module.exports = (options, ipfs, app) => {
+module.exports = (config, ipfs, app) => {
return async (request, response, next) => {
log(`Requested ${request.path}`)
@@ -14,7 +14,7 @@ module.exports = (options, ipfs, app) => {
log(`Loading ${file}`)
try {
- const readStream = await loadTarball(options, ipfs, file, app)
+ const readStream = await loadTarball(config, ipfs, file)
readStream.on('error', (error) => {
log(`Error loading ${file} - ${error}`)
@@ -38,7 +38,7 @@ module.exports = (options, ipfs, app) => {
})
.pipe(response)
} catch (error) {
- console.error(`💥 Could not load tarball for ${file}`, error)
+ console.error(`💥 Could not load tarball for ${file}`, error) // eslint-disable-line no-console
if (error.message.includes('Not found')) {
response.statusCode = 404
diff --git a/src/core/index.js b/src/core/index.js
index 1e8f0492..a4b0a63c 100644
--- a/src/core/index.js
+++ b/src/core/index.js
@@ -1,148 +1,127 @@
'use strict'
-const express = require('express')
-const once = require('once')
const config = require('./config')
-const {
- tarball,
- manifest,
- favicon,
- root
-} = require('./handlers')
-const clone = require('./clone')
-const getExternalUrl = require('./utils/get-external-url')
-const proxy = require('express-http-proxy')
-const prometheus = require('express-prom-bundle')
-const promisify = require('util').promisify
-const IPFS = require('ipfs')
-const metrics = prometheus({
- includeMethod: true,
- autoregister: false
-})
-const s3Repo = require('./utils/s3-repo')
+const startIpfs = require('./start-ipfs')
+const startServer = require('./start-server')
+const rewriteLockfile = require('./rewrite-lock-file')
+const request = require('ipfs-registry-mirror-common/utils/retry-request')
+const { spawn } = require('child_process')
+const which = require('which-promise')
+var OutputBuffer = require('output-buffer')
+
+const cleanUpOps = []
+
+const cleanUp = async () => {
+ Promise.all(
+ cleanUpOps.map(op => op())
+ )
+ .then(() => {
+ process.exit(0)
+ })
+}
+
+process.on('SIGTERM', cleanUp)
+process.on('SIGINT', cleanUp)
module.exports = async (options) => {
options = config(options)
- console.info(`📦 Mirroring npm on ${getExternalUrl(options)}`)
-
- if (options.store.type === 's3') {
- console.info(`☁️ Using s3 storage`)
+ const ipfs = await startIpfs(options)
- options.ipfs.repo = s3Repo({
- region: options.store.s3.region,
- path: options.store.s3.path,
- bucket: options.store.s3.bucket,
- accessKeyId: options.store.s3.accessKeyId,
- secretAccessKey: options.store.s3.secretAccessKey,
- createIfMissing: options.store.s3.createIfMissing
+ cleanUpOps.push(() => {
+ return new Promise((resolve) => {
+ ipfs.stop(() => {
+ console.info('👿 IPFS node stopped') // eslint-disable-line no-console
+ resolve()
+ })
})
- }
+ })
- const ipfs = await getAnIPFS(options)
+ console.info('🗂️ Loading registry index from', options.registry) // eslint-disable-line no-console
- const app = express()
- app.use(function (request, response, next) {
- response.locals.start = Date.now()
+ const mirror = await request(Object.assign({}, options.request, {
+ uri: options.registry,
+ json: true
+ }))
- response.on('finish', () => {
- const disposition = response.getHeader('Content-Disposition')
- let prefix = '📄'
+ console.info('☎️ Dialling registry mirror', mirror.ipfs.addresses.join(',')) // eslint-disable-line no-console
- if (disposition && disposition.endsWith('tgz"')) {
- prefix = '🎁'
- }
+ let connected
- console.info(`${prefix} ${request.method} ${request.url} ${response.statusCode} ${Date.now() - response.locals.start}ms`)
+ await Promise.all(
+ mirror.ipfs.addresses.map(addr => {
+ return ipfs.api.swarm.connect(mirror.ipfs.addresses[0])
+ .then(() => {
+ connected = true
+ })
+ .catch((error) => {
+ console.info(error)
+ })
})
+ )
- next()
- })
-
- app.use(metrics)
- app.use('/-/metrics', metrics.metricsMiddleware)
+ if (connected) {
+ console.info('🗑️ Replacing old registry index if it exists') // eslint-disable-line no-console
- // let the world know what version we are
- app.get('/', root(options, ipfs, app))
- app.get('/favicon.ico', favicon(options, ipfs, app))
- app.get('/favicon.png', favicon(options, ipfs, app))
+ try {
+ await ipfs.api.files.rm(options.ipfs.prefix, {
+ recursive: true
+ })
+ } catch (error) {
- // intercept requests for tarballs and manifests
- app.get('/*.tgz', tarball(options, ipfs, app))
- app.get('/*', manifest(options, ipfs, app))
+ }
- // everything else should just proxy for the registry
- const registry = proxy(options.mirror.registry, {
- limit: options.mirror.uploadSizeLimit
- })
- app.put('/*', registry)
- app.post('/*', registry)
- app.patch('/*', registry)
- app.delete('/*', registry)
+ console.info('📠 Copying registry index', mirror.root, 'to', options.ipfs.prefix) // eslint-disable-line no-console
- app.use(function (error, request, response, next) {
- console.error(`💀 ${request.method} ${request.url} ${response.statusCode} - ${error.stack}`)
+ await ipfs.api.files.cp(mirror.root, options.ipfs.prefix)
- next()
- })
-
- if (options.ipfs.port && options.ipfs.host) {
- options.store.port = options.ipfs.port
- options.store.host = options.ipfs.host
- console.info(`👺 Connecting to remote IPFS daemon at ${options.ipfs.port}:${options.ipfs.host}`)
+ console.info('💌 Copied registry index', mirror.root, 'to', options.ipfs.prefix) // eslint-disable-line no-console
} else {
- console.info('😈 Using in-process IPFS daemon')
+ console.info('📴 Could not dial mirror, running without latest registry index') // eslint-disable-line no-console
}
- if (options.clone.enabled) {
- clone(options, ipfs)
- }
+ console.info('👩🚀 Starting local proxy') // eslint-disable-line no-console
- return new Promise(async (resolve, reject) => {
- const callback = once((error) => {
- if (error) {
- reject(error)
- }
-
- if (!options.mirror.port) {
- options.mirror.port = server.address().port
- }
-
- let url = getExternalUrl(options)
-
- console.info('🚀 Server running')
- console.info(`🔧 Please either update your npm config with 'npm config set registry ${url}'`)
- console.info(`🔧 or use the '--registry' flag, eg: 'npm install --registry=${url}'`)
-
- resolve({
- server,
- app,
- stop: () => {
- return Promise.all([
- promisify(server.close.bind(server))(),
- ipfs.stop()
- ])
- .then(() => {
- console.info('✋ Server stopped')
- })
- }
+ const server = await startServer(options, ipfs.api)
+
+ cleanUpOps.push(() => {
+ return new Promise((resolve) => {
+ server.close(() => {
+ console.info('✋ Server stopped') // eslint-disable-line no-console
+ resolve()
})
})
+ })
+
+ const packageManager = await which(options.packageManager)
+
+ console.info(`🎁 Installing dependencies with ${packageManager}`) // eslint-disable-line no-console
- let server = app.listen(options.mirror.port, callback)
- server.once('error', callback)
+ const proc = spawn(packageManager, [
+ `--registry=http://localhost:${options.http.port}`
+ ].concat(process.argv.slice(2)))
- app.locals.ipfs = ipfs
+ const buffer = new OutputBuffer((line) => {
+ console.info(`🐨 ${line}`) // eslint-disable-line no-console
})
-}
-const getAnIPFS = promisify((options, callback) => {
- console.info(`🏁 Starting an IPFS instance`)
- callback = once(callback)
+ proc.stdout.on('data', (data) => {
+ buffer.append(data.toString())
+ })
- const ipfs = new IPFS({
- repo: options.ipfs.repo
+ proc.stderr.on('data', (data) => {
+ buffer.append(data.toString())
})
- ipfs.once('ready', () => callback(null, ipfs))
- ipfs.once('error', (error) => callback(error))
-})
+
+ proc.on('close', async (code) => {
+ buffer.flush()
+
+ console.log(`🎁 ${packageManager} exited with code ${code}`) // eslint-disable-line no-console
+
+ await rewriteLockfile(options)
+
+ await cleanUp()
+
+ process.exit(code)
+ })
+}
diff --git a/src/core/rewrite-lock-file.js b/src/core/rewrite-lock-file.js
new file mode 100644
index 00000000..6ef884f3
--- /dev/null
+++ b/src/core/rewrite-lock-file.js
@@ -0,0 +1,59 @@
+const {
+ readFileSync,
+ writeFileSync,
+ existsSync
+} = require('fs')
+const path = require('path')
+const URL = require('url').URL
+const yarnLockfile = require('@yarnpkg/lockfile')
+
+const replaceRegistryPath = (dependencies, registry) => {
+ Object.keys(dependencies)
+ .map(name => dependencies[name])
+ .forEach(dependency => {
+ if (dependency.resolved) {
+ const url = new URL(dependency.resolved)
+
+ url.protocol = registry.protocol
+ url.host = registry.host
+
+ dependency.resolved = url.toString()
+ }
+
+ replaceRegistryPath(dependency.dependencies || {}, registry)
+ })
+}
+
+module.exports = async (options) => {
+ if (options.packageManager === 'npm') {
+ const lockfilePath = path.join(process.cwd(), 'package-lock.json')
+
+ if (!existsSync(lockfilePath)) {
+ console.info(`🤷 No package-lock.json found`) // eslint-disable-line no-console
+ return
+ }
+
+ console.info(`🔏 Updating package-lock.json`) // eslint-disable-line no-console
+
+ const lockfile = JSON.parse(readFileSync(lockfilePath, 'utf8'))
+
+ replaceRegistryPath(lockfile.dependencies || {}, new URL(options.registry))
+
+ writeFileSync(lockfilePath, JSON.stringify(lockfile, null, 2))
+ } else if (options.packageManager === 'yarn') {
+ const lockfilePath = path.join(process.cwd(), 'yarn.lock')
+
+ if (!existsSync(lockfilePath)) {
+ console.info(`🤷 No yarn.lock found`) // eslint-disable-line no-console
+ return
+ }
+
+ console.info(`🔏 Updating yarn.lock`) // eslint-disable-line no-console
+
+ const lockfile = yarnLockfile.parse(readFileSync(lockfilePath, 'utf8'))
+
+ replaceRegistryPath(lockfile.object, new URL(options.registry))
+
+ writeFileSync(lockfilePath, yarnLockfile.stringify(lockfile, null, 2))
+ }
+}
diff --git a/src/core/start-ipfs.js b/src/core/start-ipfs.js
new file mode 100644
index 00000000..db5550b8
--- /dev/null
+++ b/src/core/start-ipfs.js
@@ -0,0 +1,55 @@
+'use strict'
+
+const IpfsApi = require('ipfs-api')
+const ipfsdCtrl = require('ipfsd-ctl')
+const which = require('which-promise')
+
+const spawn = (args) => {
+ return new Promise((resolve, reject) => {
+ ipfsdCtrl
+ .create(args)
+ .spawn({
+ init: true
+ }, (error, node) => {
+ if (error) {
+ return reject(error)
+ }
+
+ resolve(node)
+ })
+ })
+}
+
+const startIpfs = async (config) => {
+ if (config.ipfs.node === 'proc') {
+ console.info('👿 Spawning an in-process IPFS node') // eslint-disable-line no-console
+
+ return spawn({
+ type: 'proc',
+ exec: require('ipfs')
+ })
+ } else if (config.ipfs.node === 'js') {
+ console.info('👿 Spawning a js-IPFS node') // eslint-disable-line no-console
+
+ return spawn({
+ type: 'js',
+ exec: await which('jsipfs')
+ })
+ } else if (config.ipfs.node === 'go') {
+ console.info('👿 Spawning a go-IPFS node') // eslint-disable-line no-console
+
+ return spawn({
+ type: 'go',
+ exec: await which('ipfs')
+ })
+ }
+
+ console.info(`👿 Connecting to a remote IPFS node at ${config.ipfs.node}`) // eslint-disable-line no-console
+
+ return {
+ api: new IpfsApi(config.ipfs.node),
+ stop: (cb) => cb()
+ }
+}
+
+module.exports = startIpfs
diff --git a/src/core/start-server.js b/src/core/start-server.js
new file mode 100644
index 00000000..747a188a
--- /dev/null
+++ b/src/core/start-server.js
@@ -0,0 +1,60 @@
+'use strict'
+
+const express = require('express')
+const proxy = require('express-http-proxy')
+const once = require('once')
+const requestLog = require('ipfs-registry-mirror-common/handlers/request-log')
+const errorLog = require('ipfs-registry-mirror-common/handlers/error-log')
+const favicon = require('ipfs-registry-mirror-common/handlers/favicon')
+const root = require('./handlers/root')
+const tarball = require('./handlers/tarball')
+const manifest = require('./handlers/manifest')
+
+const startServer = (config, ipfs) => {
+ const app = express()
+
+ app.use(requestLog)
+
+ app.get('/favicon.ico', favicon(config, ipfs, app))
+ app.get('/favicon.png', favicon(config, ipfs, app))
+
+ app.get('/', root(config, ipfs, app))
+
+ // intercept requests for tarballs and manifests
+ app.get('/*.tgz', tarball(config, ipfs, app))
+ app.get('/*', manifest(config, ipfs, app))
+
+ // everything else should just proxy for the registry
+ const registry = proxy(config.registry, {
+ limit: config.registryUploadSizeLimit
+ })
+ app.put('/*', registry)
+ app.post('/*', registry)
+ app.patch('/*', registry)
+ app.delete('/*', registry)
+
+ app.use(errorLog)
+
+ app.locals.ipfs = ipfs
+
+ return new Promise(async (resolve, reject) => {
+ const callback = once((error) => {
+ if (error) {
+ reject(error)
+ }
+
+ if (!config.http.port) {
+ config.http.port = server.address().port
+ }
+
+ console.info(`🚀 Server running on port ${config.http.port}`) // eslint-disable-line no-console
+
+ resolve(server)
+ })
+
+ let server = app.listen(config.http.port, callback)
+ server.once('error', callback)
+ })
+}
+
+module.exports = startServer
diff --git a/src/core/utils/create-pool.js b/src/core/utils/create-pool.js
deleted file mode 100644
index 85102388..00000000
--- a/src/core/utils/create-pool.js
+++ /dev/null
@@ -1,53 +0,0 @@
-'use strict'
-
-module.exports = (concurrency) => {
- const queue = []
- let executing = 0
-
- const maybeExecuteNext = () => {
- if (executing === concurrency || !queue.length) {
- return
- }
-
- const task = queue.shift()
-
- executing++
- task.fn()
- .catch((error) => error)
- .then((result) => {
- executing--
-
- if (result instanceof Error) {
- task.reject(result)
- } else {
- task.resolve(result)
- }
-
- maybeExecuteNext()
- })
- }
-
- return {
- addTask: (fn) => {
- const existingTask = queue.find(other => fn.id && fn.id === other.fn.id)
-
- if (existingTask) {
- return existingTask.promise
- }
-
- const task = {
- fn
- }
- queue.push(task)
-
- task.promise = new Promise((resolve, reject) => {
- task.resolve = resolve
- task.reject = reject
-
- setImmediate(() => maybeExecuteNext())
- })
-
- return task.promise
- }
- }
-}
diff --git a/src/core/utils/error-message.js b/src/core/utils/error-message.js
deleted file mode 100644
index 82b9afef..00000000
--- a/src/core/utils/error-message.js
+++ /dev/null
@@ -1,7 +0,0 @@
-'use strict'
-
-const lol = (message) => {
- return `${message} `
-}
-
-module.exports = lol
diff --git a/src/core/utils/get-external-url.js b/src/core/utils/get-external-url.js
deleted file mode 100644
index 80397012..00000000
--- a/src/core/utils/get-external-url.js
+++ /dev/null
@@ -1,17 +0,0 @@
-'use strict'
-
-const {
- URL
-} = require('url')
-
-module.exports = (options) => {
- const url = new URL('http://foo.com')
- url.protocol = options.external.protocol || options.mirror.protocol
- url.host = options.external.host || options.mirror.host
- url.port = options.external.port || options.mirror.port
-
- const string = url.toString()
-
- // strip the trailing slash
- return string.substring(0, string.length - 1)
-}
diff --git a/src/core/utils/load-manifest.js b/src/core/utils/load-manifest.js
deleted file mode 100644
index 23f70f32..00000000
--- a/src/core/utils/load-manifest.js
+++ /dev/null
@@ -1,67 +0,0 @@
-'use strict'
-
-const request = require('../utils/retry-request')
-const log = require('debug')('ipfs:registry-mirror:utils:load-manifest')
-const saveManifest = require('./save-manifest')
-const replaceTarballUrls = require('./replace-tarball-urls')
-
-const loadManifest = async (options, ipfs, packageName) => {
- let mfsVersion = {}
- let npmVersion = {}
-
- const mfsPath = `${options.store.baseDir}/${packageName}`
- const npmUrl = `${options.mirror.registry}/${packageName}`
-
- try {
- mfsVersion = JSON.parse(await ipfs.files.read(mfsPath))
- } catch (error) {
- if (error.message.includes('file does not exist')) {
- log(`${mfsPath} not in MFS`)
- } else {
- log(`Could not read ${mfsPath}`, error)
- }
- }
-
- const modified = new Date((mfsVersion.time && mfsVersion.time.modified) || 0)
- const willDownload = (Date.now() - options.mirror.registryUpdateInterval) > modified.getTime()
-
- if (willDownload) {
- try {
- log(`Fetching ${npmUrl}`)
- const start = Date.now()
- npmVersion = await request(Object.assign({}, options.request, {
- uri: npmUrl,
- json: true
- }))
- log(`Fetched ${npmUrl} in ${Date.now() - start}ms`)
- } catch (error) {
- log(`Could not download ${npmUrl}`, error)
- }
- }
-
- if (!mfsVersion._rev && !npmVersion._rev) {
- throw new Error(`Not found, tried npm: ${willDownload}`)
- }
-
- if (mfsVersion._rev && (!npmVersion._rev || npmVersion._rev === mfsVersion._rev)) {
- // we have a cached version and either fetching from npm failed or
- // our cached version matches the npm version
- return mfsVersion
- }
-
- console.info(`🆕 New version of ${packageName} detected`)
-
- npmVersion = replaceTarballUrls(options, npmVersion)
-
- // save our existing versions so we don't re-download tarballs we already have
- Object.keys(mfsVersion.versions || {}).forEach(versionNumber => {
- npmVersion.versions[versionNumber] = mfsVersion.versions[versionNumber]
- })
-
- // store it for next time
- await saveManifest(npmVersion, ipfs, options)
-
- return npmVersion
-}
-
-module.exports = loadManifest
diff --git a/src/core/utils/load-tarball.js b/src/core/utils/load-tarball.js
deleted file mode 100644
index 14144aae..00000000
--- a/src/core/utils/load-tarball.js
+++ /dev/null
@@ -1,62 +0,0 @@
-'use strict'
-
-const saveTarball = require('../clone/save-tarball')
-const CID = require('cids')
-const loadManifest = require('../utils/load-manifest')
-
-const readOrDownloadTarball = async (options, ipfs, path, emitter) => {
- const {
- packageName,
- packageVersion
- } = extractPackageDetails(path)
-
- let manifest = await loadManifest(options, ipfs, packageName)
- let version = manifest.versions[packageVersion]
-
- if (!version) {
- throw new Error(`Could not find version ${packageName}@${packageVersion} in available versions ${Object.keys(manifest.versions)}`)
- }
-
- if (!version.dist.cid) {
- return saveTarball(options, manifest.name, packageVersion, ipfs, emitter)
- }
-
- if (!version.dist.cid) {
- throw new Error(`CID for ${packageName}@${packageVersion} missing after download`)
- }
-
- let v0Cid
-
- try {
- v0Cid = new CID(version.dist.cid).toV0().toBaseEncodedString()
- } catch (error) {
- throw new Error(`Could not turn ${version.dist.cid} into a CID - ${error.stack}`)
- }
-
- return ipfs.files.readReadableStream(`/ipfs/${v0Cid}`)
-}
-
-const extractPackageDetails = (path) => {
- let [
- packageName, fileName
- ] = path.split('/-/')
-
- if (packageName.startsWith('/')) {
- packageName = packageName.substring(1)
- }
-
- let moduleName = packageName
-
- if (packageName.startsWith('@')) {
- moduleName = packageName.split('/').pop()
- }
-
- const packageVersion = fileName.substring(moduleName.length + 1, fileName.length - 4)
-
- return {
- packageName,
- packageVersion
- }
-}
-
-module.exports = readOrDownloadTarball
diff --git a/src/core/utils/replace-tarball-urls.js b/src/core/utils/replace-tarball-urls.js
deleted file mode 100644
index 35caae51..00000000
--- a/src/core/utils/replace-tarball-urls.js
+++ /dev/null
@@ -1,22 +0,0 @@
-'use strict'
-
-const getExternalUrl = require('./get-external-url')
-
-const replaceTarballUrls = (options, pkg) => {
- const prefix = getExternalUrl(options)
- const packageName = pkg.name
- const moduleName = packageName.startsWith('@') ? packageName.split('/').pop() : packageName
-
- // change tarball URLs to point to us
- Object.keys(pkg.versions || {})
- .forEach(versionNumber => {
- const version = pkg.versions[versionNumber]
-
- version.dist.source = version.dist.tarball
- version.dist.tarball = `${prefix}/${packageName}/-/${moduleName}-${versionNumber}.tgz`
- })
-
- return pkg
-}
-
-module.exports = replaceTarballUrls
diff --git a/src/core/utils/retry-request.js b/src/core/utils/retry-request.js
deleted file mode 100644
index 71236d2b..00000000
--- a/src/core/utils/retry-request.js
+++ /dev/null
@@ -1,60 +0,0 @@
-'use strict'
-
-const requestPromise = require('request-promise')
-const request = require('request')
-const {
- PassThrough
-} = require('stream')
-
-const makeRequest = (options) => {
- if (options.json) {
- return requestPromise(options)
- }
-
- // resolve with stream
- return new Promise((resolve, reject) => {
- const output = new PassThrough()
-
- const stream = request(options)
- stream.on('response', (response) => {
- if (response.statusCode < 200 || response.statusCode > 299) {
- return reject(new Error(`${options.url} - ${response.statusCode}`))
- }
- })
- stream.on('error', (error) => {
- reject(error)
- })
- stream.once('data', (data) => {
- resolve(output)
- })
- stream.pipe(output)
- })
-}
-
-const retryRequest = (options, attempt = 1) => {
- const maxAttempts = options.retries || 1
- const delay = options.retryDelay || 0
-
- return makeRequest(options)
- .catch(error => {
- const method = (options.method || 'GET').toUpperCase()
-
- console.info(`🚨 Request to ${method} ${options.uri} failed on attempt ${attempt} - ${error}`)
-
- attempt += 1
-
- if (attempt > maxAttempts) {
- return Promise.reject(new Error(`Gave up requesting ${method} ${options.uri} after ${attempt} attempts`))
- }
-
- return new Promise((resolve, reject) => {
- setTimeout(() => {
- retryRequest(options, attempt)
- .then(resolve)
- .catch(reject)
- }, delay)
- })
- })
-}
-
-module.exports = retryRequest
diff --git a/src/core/utils/s3-repo.js b/src/core/utils/s3-repo.js
deleted file mode 100644
index 68315a31..00000000
--- a/src/core/utils/s3-repo.js
+++ /dev/null
@@ -1,40 +0,0 @@
-'use strict'
-
-const S3 = require('aws-sdk/clients/s3')
-const S3Store = require('datastore-s3')
-const S3Lock = require('datastore-s3/examples/full-s3-repo/s3-lock')
-const IPFSRepo = require('ipfs-repo')
-
-const s3Repo = ({ region, bucket, path, accessKeyId, secretAccessKey, createIfMissing }) => {
- path = path || process.env.HOSTNAME
-
- const storeOptions = {
- s3: new S3({
- params: {
- Bucket: bucket
- },
- region,
- accessKeyId,
- secretAccessKey
- }),
- createIfMissing
- }
-
- return new IPFSRepo(path, {
- storageBackends: {
- root: S3Store,
- blocks: S3Store,
- keys: S3Store,
- datastore: S3Store
- },
- storageBackendOptions: {
- root: storeOptions,
- blocks: storeOptions,
- keys: storeOptions,
- datastore: storeOptions
- },
- lock: new S3Lock(new S3Store('', storeOptions))
- })
-}
-
-module.exports = s3Repo
diff --git a/src/core/utils/sanitise-name.js b/src/core/utils/sanitise-name.js
deleted file mode 100644
index 1659d4ff..00000000
--- a/src/core/utils/sanitise-name.js
+++ /dev/null
@@ -1,17 +0,0 @@
-'use strict'
-
-const sanitiseName = (name) => {
- name = `${(name || '').trim()}`.replace(/^(\/)+/, '/')
-
- if (name.startsWith('/')) {
- name = name.substring(1)
- }
-
- if (name.startsWith('@')) {
- name = name.replace(/%2f/g, '/')
- }
-
- return name
-}
-
-module.exports = sanitiseName
diff --git a/src/core/utils/save-manifest.js b/src/core/utils/save-manifest.js
deleted file mode 100644
index 1965e121..00000000
--- a/src/core/utils/save-manifest.js
+++ /dev/null
@@ -1,27 +0,0 @@
-'use strict'
-
-const log = require('debug')('ipfs:registry-mirror:utils:save-manifest')
-
-const saveManifest = async (pkg, ipfs, options) => {
- if (!pkg.name || pkg.error) {
- const error = pkg.error || new Error('No name found in package.json')
-
- throw error
- }
-
- const file = `${options.store.baseDir}/${pkg.name}`
-
- log(`Writing json for ${pkg.name} to ${file}`)
-
- return ipfs.files.write(file, Buffer.from(JSON.stringify(pkg)), {
- create: true,
- truncate: true,
- parents: true,
- flush: options.store.flush
- })
- .then(() => {
- log(`Wrote manifest for ${pkg.name} to ${file}`)
- })
-}
-
-module.exports = saveManifest
diff --git a/src/index.js b/src/index.js
deleted file mode 100644
index 6f0c13d7..00000000
--- a/src/index.js
+++ /dev/null
@@ -1,5 +0,0 @@
-'use strict'
-
-module.exports = {
-
-}
diff --git a/test/clone.spec.js b/test/clone.spec.js
deleted file mode 100644
index f9ce0b96..00000000
--- a/test/clone.spec.js
+++ /dev/null
@@ -1,268 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-const mock = require('mock-require')
-const sinon = require('sinon')
-const config = require('../src/core/config')
-const createIpfs = require('./fixtures/create-ipfs')
-const {
- createTestServer,
- destroyTestServers
-} = require('./fixtures/test-server')
-const invoke = require('./fixtures/invoke')
-const expect = require('chai')
- .use(require('dirty-chai'))
- .expect
-const hat = require('hat')
-const saveManifest = require('../src/core/utils/save-manifest')
-
-const baseDir = '/commons-registry-clone-test'
-
-describe('clone', function () {
- this.timeout(10000)
- let clone
- let follow
- let ipfs
-
- before(async () => {
- ipfs = await createIpfs()
- })
-
- after(async () => {
- await ipfs.stop()
- })
-
- beforeEach(async () => {
- follow = sinon.stub()
- mock('follow-registry', follow)
- clone = mock.reRequire('../src/core/clone')
- })
-
- afterEach(async () => {
- mock.stopAll()
-
- await destroyTestServers()
- })
-
- const options = (overrides = {}) => {
- return Object.assign({
- ipfsBaseDir: baseDir,
- eagerDownload: false,
- externalHost: 'registry-host',
- externalPort: 443,
- externalProtocol: 'https',
- ipfsFlush: true
- }, overrides)
- }
-
- it('should eagerly download a new module', async () => {
- const tarballPath = '/new-module/-/new-module-1.0.0.tgz'
- const tarballContent = 'I am some binary'
-
- const server = await createTestServer({
- [tarballPath]: tarballContent
- })
-
- const cloner = clone(config(options({
- eagerDownload: true
- })), ipfs)
-
- const handler = follow.getCall(0).args[0].handler
- const data = {
- json: {
- name: 'new-module',
- _rev: '12345',
- versions: {
- '0.0.1': {
- dist: {
- tarball: `http://127.0.0.1:${server.address().port}${tarballPath}`,
- shasum: '3f9f726832b39c2cc7ac515c8a6c97b94b608b0e'
- }
- }
- }
- }
- }
-
- invoke(handler, data)
-
- return new Promise((resolve, reject) => {
- cloner.once('processed', (event) => {
- try {
- expect(event.name).to.equal('new-module')
- expect(Object.keys(event.versions).length).to.equal(1)
- expect(event.versions['0.0.1'].dist.source).to.equal(`http://127.0.0.1:${server.address().port}${tarballPath}`)
- expect(event.versions['0.0.1'].dist.tarball).to.equal(`https://registry-host/new-module/-/new-module-0.0.1.tgz`)
- } catch (error) {
- return reject(error)
- }
-
- resolve()
- })
- })
- })
-
- it('should not eagerly download a new module', async () => {
- const tarballPath = '/new-module/-/1.0.0/new-module-1.0.0.tar.gz'
- const tarballContent = 'I am some binary'
-
- const server = await createTestServer({
- [tarballPath]: tarballContent
- })
-
- const cloner = clone(config(options({
- eagerDownload: false
- })), ipfs)
-
- const handler = follow.getCall(0).args[0].handler
- const data = {
- json: {
- name: 'new-module',
- _rev: '12345',
- versions: {
- '0.0.1': {
- dist: {
- tarball: `http://127.0.0.1:${server.address().port}${tarballPath}`,
- shasum: '123'
- }
- }
- }
- }
- }
-
- invoke(handler, data)
-
- return new Promise((resolve, reject) => {
- cloner.once('processed', (event) => {
- try {
- expect(event.name).to.equal('new-module')
- } catch (error) {
- return reject(error)
- }
-
- resolve()
- })
- })
- })
-
- it('should survive an invalid update', (done) => {
- clone(config(options()), ipfs)
-
- const handler = follow.getCall(0).args[0].handler
- const data = {}
-
- handler(data, () => {
- done()
- })
- })
-
- it('should survive npm 503ing', (done) => {
- clone(config(options()), ipfs)
-
- const handler = follow.getCall(0).args[0].handler
- const data = {
- json: '503 Service Unavailable \nNo server is available to handle this request.\n\n\n',
- versions: [],
- tarballs: [],
- seq: 283813
- }
-
- handler(data, () => {
- done()
- })
- })
-
- it('should survive npm 504ing', (done) => {
- clone(config(options()), ipfs)
-
- const handler = follow.getCall(0).args[0].handler
- const data = {
- json: '504 Gateway Time-out \nThe server didn\'t respond in time.\n\n\n',
- versions: [],
- tarballs: [],
- seq: 283813
- }
-
- handler(data, () => {
- done()
- })
- })
-
- it('should survive npm 404ing', (done) => {
- clone(config(options()), ipfs)
-
- const handler = follow.getCall(0).args[0].handler
- const data = {
- json: {
- error: 'not_found',
- reason: 'missing'
- },
- versions: [],
- tarballs: [],
- seq: 50390
- }
-
- handler(data, () => {
- done()
- })
- })
-
- it('should not download a tarball that already exists', async () => {
- const moduleName = `my-module-${hat()}`
- const tarballPath = `/${moduleName}/-/${moduleName}-1.0.0.tar.gz`
- const manifest = {
- name: moduleName,
- _rev: '12345',
- versions: {
- '1.0.0': {
- dist: {
- tarball: `http://127.0.0.1:8080${tarballPath}`,
- source: `http://127.0.0.1:8080${tarballPath}`,
- cid: 'QmZVQm5euZa69LtUFt8HuuBPSpLYMxcxACh6F5M8ZqpbR9',
- shasum: '123'
- }
- }
- }
- }
-
- await saveManifest(manifest, ipfs, {
- store: {
- baseDir,
- flush: true
- }
- })
-
- const cloner = clone(config(options({
- eagerDownload: true
- })), ipfs)
-
- const handler = follow.getCall(0).args[0].handler
- const data = {
- json: {
- name: moduleName,
- _rev: '12345',
- versions: {
- '1.0.0': {
- dist: {
- tarball: `http://127.0.0.1:8080${tarballPath}`,
- shasum: '123'
- }
- }
- }
- }
- }
-
- invoke(handler, data)
-
- return new Promise((resolve, reject) => {
- cloner.once('processed', (event) => {
- try {
- expect(event.name).to.equal(moduleName)
- } catch (error) {
- return reject(error)
- }
-
- resolve()
- })
- })
- })
-})
diff --git a/test/config.spec.js b/test/config.spec.js
deleted file mode 100644
index ffc34e98..00000000
--- a/test/config.spec.js
+++ /dev/null
@@ -1,36 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-const config = require('../src/core/config')
-const expect = require('chai')
- .use(require('dirty-chai'))
- .expect
-
-describe('config', () => {
- it('should respect options', () => {
- const result = config.option(undefined, undefined, 1)
-
- expect(result).to.equal(1)
- })
-
- it('should respect options with NaN', () => {
- const result = config.option(undefined, NaN, 1)
-
- expect(result).to.equal(1)
- })
-
- it('should coerce to boolean', () => {
- expect(config.toBoolean('true')).to.equal(true)
- expect(config.toBoolean('false')).to.equal(false)
- expect(config.toBoolean('1')).to.equal(true)
- expect(config.toBoolean('0')).to.equal(false)
- expect(config.toBoolean('yes')).to.equal(true)
- expect(config.toBoolean('no')).to.equal(false)
- expect(config.toBoolean(true)).to.equal(true)
- expect(config.toBoolean(false)).to.equal(false)
- })
-
- it('should not coerce undefined', () => {
- expect(config.toBoolean(undefined)).to.equal(undefined)
- })
-})
diff --git a/test/core.spec.js b/test/core.spec.js
deleted file mode 100644
index 3262beed..00000000
--- a/test/core.spec.js
+++ /dev/null
@@ -1,354 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-const promisify = require('util').promisify
-const mock = require('mock-require')
-const request = require('request-promise')
-const {
- createTestServer,
- destroyTestServers
-} = require('./fixtures/test-server')
-const expect = require('chai')
- .use(require('dirty-chai'))
- .expect
-const createDagNode = promisify(require('ipld-dag-pb').DAGNode.create)
-const UnixFS = require('ipfs-unixfs')
-const pkg = require('../package.json')
-const path = require('path')
-const os = require('os')
-const hat = require('hat')
-const delay = require('promise-delay')
-
-describe('core', function () {
- this.timeout(10000)
- const baseDir = '/commons-registry-test'
- let startMirror
- let mirror
- let mirrorUrl
- let upstreamModules = {}
- let options
-
- const serverOptions = (registry, options = {}) => {
- return Object.assign({}, {
- mirrorProtocol: 'http',
- mirrorHost: '127.0.0.1',
- mirrorRegistry: `http://127.0.0.1:${registry.address().port}`,
- requestRetries: 5,
- requestRetryDelay: 100,
- ipfsBaseDir: baseDir,
- requestTimeout: 1000,
- ipfsRepo: path.join(os.tmpdir(), hat()),
- ipfsFlush: true,
- registryUpdateInterval: 0
- }, options)
- }
-
- before(async () => {
- startMirror = mock.reRequire('../src/core')
-
- let registryServer = await createTestServer(upstreamModules)
- options = serverOptions(registryServer)
-
- mirror = await startMirror(options)
-
- options.mirrorPort = mirror.server.address().port
-
- mirrorUrl = `${options.mirrorProtocol}://${options.mirrorHost}:${options.mirrorPort}`
- })
-
- after(async function () {
- mock.stopAll()
-
- await destroyTestServers()
-
- if (mirror && mirror.stop) {
- await mirror.stop()
- }
- })
-
- it('should serve a manifest', async function () {
- const moduleName = `module-${hat()}`
- const content = JSON.stringify({
- _rev: '12345',
- name: moduleName,
- versions: {}
- })
-
- await mirror.app.locals.ipfs.files.write(`${baseDir}/${moduleName}`, Buffer.from(content), {
- parents: true,
- create: true,
- truncate: true
- })
-
- const result = await request({
- uri: `${mirrorUrl}/${moduleName}`
- })
-
- expect(result).to.equal(content)
- })
-
- it('should serve a tarball', async () => {
- const moduleName = `module-${hat()}`
- const tarballContent = 'tarball-content'
- const fsNode = UnixFS('file', Buffer.from(tarballContent))
-
- const node = await createDagNode(fsNode.marshal())
-
- await mirror.app.locals.ipfs.dag.put(node, {
- cid: node._cid
- })
-
- const manifest = JSON.stringify({
- _rev: '12345',
- name: moduleName,
- versions: {
- '1.0.0': {
- dist: {
- cid: node._cid.toBaseEncodedString()
- }
- }
- }
- })
-
- await mirror.app.locals.ipfs.files.write(`${baseDir}/${moduleName}`, Buffer.from(manifest), {
- parents: true,
- create: true,
- truncate: true
- })
-
- const result = await request({
- uri: `${mirrorUrl}/${moduleName}/-/${moduleName}-1.0.0.tgz`
- })
-
- expect(result).to.equal(tarballContent)
- })
-
- it('should serve some basic info', async () => {
- const result = JSON.parse(await request({
- uri: `${mirrorUrl}`
- }))
-
- expect(result.name).to.equal(pkg.name)
- expect(result.version).to.equal(pkg.version)
- })
-
- it('should download a missing manifest', async () => {
- const moduleName = `module-${hat()}`
- const data = JSON.stringify({
- _rev: '12345',
- name: moduleName,
- versions: {}
- })
-
- upstreamModules[`/${moduleName}`] = (request, response) => {
- response.statusCode = 200
- response.end(data)
- }
-
- const result = await request({
- uri: `${mirrorUrl}/${moduleName}`
- })
-
- expect(result.trim()).to.equal(data.trim())
- })
-
- it('should download a missing tarball from an existing module', async () => {
- const moduleName = `module-${hat()}`
- const tarballPath = `${moduleName}/-/${moduleName}-1.0.0.tgz`
- const tarballContent = 'tarball content'
- const manifest = JSON.stringify({
- _rev: '12345',
- name: moduleName,
- versions: {
- '1.0.0': {
- dist: {
- tarball: `${options.mirrorRegistry}/${tarballPath}`,
- shasum: '15d0e36e27c69bc758231f8e9add837f40a40cd0'
- }
- }
- }
- })
-
- upstreamModules[`/${moduleName}`] = (request, response) => {
- response.statusCode = 200
- response.end(manifest)
- }
- upstreamModules[`/${tarballPath}`] = (request, response) => {
- response.statusCode = 200
- response.end(tarballContent)
- }
-
- const result = await request({
- uri: `${mirrorUrl}/${tarballPath}`
- })
-
- expect(result).to.equal(tarballContent)
- })
-
- it('should download a manifest from a missing scoped module', async () => {
- const moduleName = `@my-scope/module-${hat()}`
- const data = JSON.stringify({
- _rev: '12345',
- name: moduleName,
- versions: {}
- })
-
- upstreamModules[`/${moduleName}`] = (request, response) => {
- response.statusCode = 200
- response.end(data)
- }
-
- const result = await request({
- uri: `${mirrorUrl}/${moduleName.replace('/', '%2f')}`
- })
-
- expect(result.trim()).to.equal(data.trim())
- })
-
- it('should check with the upstream registry for updated versions', async () => {
- const moduleName = `module-${hat()}`
- const tarball1Path = `${moduleName}/-/${moduleName}-1.0.0.tgz`
- const tarball2Path = `${moduleName}/-/${moduleName}-2.0.0.tgz`
- const tarball1Content = 'tarball 1 content'
- const tarball2Content = 'tarball 2 content'
- const manifest1 = JSON.stringify({
- _rev: '12345-1',
- name: moduleName,
- versions: {
- '1.0.0': {
- dist: {
- shasum: '669965318736dfe855479a6dd441d81f101ae5ae',
- tarball: `${options.mirrorRegistry}/${tarball1Path}`
- }
- }
- }
- })
- const manifest2 = JSON.stringify({
- _rev: '12345-2',
- name: moduleName,
- versions: {
- '1.0.0': {
- dist: {
- shasum: '669965318736dfe855479a6dd441d81f101ae5ae',
- tarball: `${options.mirrorRegistry}/${tarball1Path}`
- }
- },
- '2.0.0': {
- dist: {
- shasum: '4e9dab818d5f0a45e4ded14021cf0bc28c456f74',
- tarball: `${options.mirrorRegistry}/${tarball2Path}`
- }
- }
- }
- })
- let invocations = 0
-
- upstreamModules[`/${moduleName}`] = (request, response) => {
- response.statusCode = 200
- invocations++
-
- if (invocations === 1) {
- response.end(manifest1)
- } else {
- response.end(manifest2)
- }
- }
- upstreamModules[`/${tarball1Path}`] = (request, response) => {
- response.statusCode = 200
- response.end(tarball1Content)
- }
- upstreamModules[`/${tarball2Path}`] = (request, response) => {
- response.statusCode = 200
- response.end(tarball2Content)
- }
-
- const result1 = await request({
- uri: `${mirrorUrl}/${tarball1Path}`
- })
- const result2 = await request({
- uri: `${mirrorUrl}/${tarball2Path}`
- })
-
- expect(result1).to.equal(tarball1Content)
- expect(result2).to.equal(tarball2Content)
- })
-
- it('should proxy all other requests to the registry', async () => {
- let data = 'hello world'
-
- upstreamModules['/-/user/org.couchdb.user:dave'] = data
-
- const result = await request({
- uri: `${mirrorUrl}/-/user/org.couchdb.user:dave`,
- method: 'put'
- })
-
- expect(result.trim()).to.equal(data.trim())
- })
-
- it('should retry when 404s are encountered', async () => {
- const moduleName = `module-404-${hat()}`
- const data = JSON.stringify({
- name: moduleName,
- _rev: '12345',
- versions: {}
- })
- let invocations = 0
-
- upstreamModules[`/${moduleName}`] = (request, response) => {
- invocations++
-
- if (invocations === 1) {
- response.statusCode = 404
- return response.end('404')
- }
-
- response.statusCode = 200
- return response.end(data)
- }
-
- await request({
- uri: `${mirrorUrl}/${moduleName}`
- })
-
- expect(invocations).to.equal(2)
- })
-
- it('should not save tarball CID when shasums do not match', async () => {
- const moduleName = `module-${hat()}`
- const tarballPath = `${moduleName}/-/${moduleName}-1.0.0.tgz`
- const tarballContent = 'tarball content'
- const manifest = JSON.stringify({
- _rev: '12345',
- name: moduleName,
- versions: {
- '1.0.0': {
- dist: {
- tarball: `${options.mirrorRegistry}/${tarballPath}`,
- shasum: 'nope!'
- }
- }
- }
- })
-
- upstreamModules[`/${moduleName}`] = (request, response) => {
- response.statusCode = 200
- response.end(manifest)
- }
- upstreamModules[`/${tarballPath}`] = (request, response) => {
- response.statusCode = 200
- response.end(tarballContent)
- }
-
- await request({
- uri: `${mirrorUrl}/${tarballPath}`
- })
-
- // let the download be processed
- await delay(1000)
-
- const updated = JSON.parse(await mirror.app.locals.ipfs.files.read(`${baseDir}/${moduleName}`))
-
- expect(updated.versions['1.0.0'].dist.cid).to.not.be.ok()
- })
-})
diff --git a/test/fixtures/create-ipfs.js b/test/fixtures/create-ipfs.js
deleted file mode 100644
index 8e36db98..00000000
--- a/test/fixtures/create-ipfs.js
+++ /dev/null
@@ -1,17 +0,0 @@
-'use strict'
-
-const IPFS = require('ipfs')
-const hat = require('hat')
-const os = require('os')
-const path = require('path')
-
-module.exports = () => {
- return new Promise((resolve, reject) => {
- const ipfs = new IPFS({
- repo: path.join(os.tmpdir(), hat())
- })
-
- ipfs.once('ready', () => resolve(ipfs))
- ipfs.once('error', (error) => reject(error))
- })
-}
diff --git a/test/fixtures/create-module-update.js b/test/fixtures/create-module-update.js
deleted file mode 100644
index df0df8b2..00000000
--- a/test/fixtures/create-module-update.js
+++ /dev/null
@@ -1,11 +0,0 @@
-'use strict'
-
-// the sort of data structure emitted by follow-registry
-
-module.exports = (name, versions) => {
- return {
- seq: 5,
- name: name,
- versions
- }
-}
diff --git a/test/fixtures/create-write-stream.js b/test/fixtures/create-write-stream.js
deleted file mode 100644
index 2c68d9f3..00000000
--- a/test/fixtures/create-write-stream.js
+++ /dev/null
@@ -1,13 +0,0 @@
-'use strict'
-
-const sinon = require('sinon')
-
-module.exports = () => {
- return {
- write: sinon.stub(),
- end: sinon.stub(),
- on: sinon.stub(),
- emit: sinon.stub(),
- removeListener: sinon.stub()
- }
-}
diff --git a/test/fixtures/invoke.js b/test/fixtures/invoke.js
deleted file mode 100644
index c212b5f3..00000000
--- a/test/fixtures/invoke.js
+++ /dev/null
@@ -1,15 +0,0 @@
-'use strict'
-
-module.exports = (func, ...args) => {
- return new Promise((resolve, reject) => {
- args.push((error, result) => {
- if (error) {
- return reject(error)
- }
-
- resolve(result)
- })
-
- func.apply(null, args)
- })
-}
diff --git a/test/fixtures/package.json b/test/fixtures/package.json
new file mode 100644
index 00000000..742701a1
--- /dev/null
+++ b/test/fixtures/package.json
@@ -0,0 +1,8 @@
+{
+ "name": "a-project",
+ "version": "1.0.0",
+ "description": "A really nice project",
+ "dependencies": {
+ "express": "^4.16.4"
+ }
+}
diff --git a/test/fixtures/test-server.js b/test/fixtures/test-server.js
deleted file mode 100644
index 9bfa6242..00000000
--- a/test/fixtures/test-server.js
+++ /dev/null
@@ -1,52 +0,0 @@
-'use strict'
-
-const http = require('http')
-
-let testServers = []
-
-module.exports = {
- createTestServer: (resources) => {
- return new Promise((resolve, reject) => {
- const server = http.createServer((request, response) => {
- if (resources[request.url]) {
- if (typeof resources[request.url] === 'function') {
- return resources[request.url](request, response)
- }
-
- response.statusCode = 200
- return response.end(resources[request.url])
- }
-
- response.statusCode = 404
- response.end('404')
- })
-
- server.listen((error) => {
- if (error) {
- return reject(error)
- }
-
- testServers.push(server)
-
- if (typeof resources === 'function') {
- resources = resources(server)
- }
-
- resolve(server)
- })
- })
- },
-
- destroyTestServers: () => {
- const servers = testServers
- testServers = []
-
- return Promise.all(
- servers.map((server) => {
- return new Promise((resolve) => {
- server.close(resolve)
- })
- })
- )
- }
-}
diff --git a/test/install.spec.js b/test/install.spec.js
new file mode 100644
index 00000000..c09740ff
--- /dev/null
+++ b/test/install.spec.js
@@ -0,0 +1,61 @@
+/* eslint-env mocha */
+'use strict'
+
+const promisify = require('util').promisify
+const fs = {
+ mkdir: promisify(require('fs').mkdir),
+ copyFile: promisify(require('fs').copyFile)
+}
+const path = require('path')
+const os = require('os')
+const hat = require('hat')
+const {
+ spawn
+} = require('child_process')
+var OutputBuffer = require('output-buffer')
+
+describe('install', function () {
+ this.timeout(120000)
+
+ let projectDirectory
+
+ before(async () => {
+ projectDirectory = path.join(os.tmpdir(), hat())
+
+ await fs.mkdir(projectDirectory)
+ await fs.copyFile(path.resolve(__dirname, './fixtures/package.json'), path.join(projectDirectory, 'package.json'))
+ })
+
+ it('should install a project', (done) => {
+ const installer = spawn(
+ process.argv[0], [
+ path.resolve(__dirname, '../src/cli/bin.js'),
+ `--ipfs-repo=${path.join(os.tmpdir(), hat())}`,
+ 'install'
+ ], {
+ cwd: projectDirectory
+ })
+
+ const buffer = new OutputBuffer((line) => {
+ console.info(line) // eslint-disable-line no-console
+ })
+
+ installer.stdout.on('data', (data) => {
+ buffer.append(data.toString())
+ })
+
+ installer.stderr.on('data', (data) => {
+ buffer.append(data.toString())
+ })
+
+ installer.on('close', async (code) => {
+ buffer.flush()
+
+ if (code === 0) {
+ return done()
+ }
+
+ done(new Error(`Unexpected exit code ${code}`))
+ })
+ })
+})
diff --git a/test/node.js b/test/node.js
index 70977b27..09906737 100644
--- a/test/node.js
+++ b/test/node.js
@@ -1,6 +1,3 @@
'use strict'
-require('./config.spec')
-require('./clone.spec')
-require('./core.spec')
-require('./pool.spec')
+require('./install.spec')
diff --git a/test/pool.spec.js b/test/pool.spec.js
deleted file mode 100644
index 09b030ab..00000000
--- a/test/pool.spec.js
+++ /dev/null
@@ -1,67 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-const expect = require('chai')
- .use(require('dirty-chai'))
- .expect
-const createPool = require('../src/core/utils/create-pool')
-const delay = require('promise-delay')
-
-describe('pool', () => {
- it('should not execute the same task twice', async () => {
- const pool = createPool(5)
- let invocations = 0
-
- const task = () => {
- return new Promise((resolve) => {
- setTimeout(() => {
- invocations = invocations + 1
-
- resolve()
- }, 100)
- })
- }
- task.id = 'something-unique'
-
- await Promise.all([
- pool.addTask(task),
- pool.addTask(task),
- pool.addTask(task),
- pool.addTask(task),
- pool.addTask(task),
- pool.addTask(task)
- ])
-
- expect(invocations).to.equal(1)
- })
-
- it('should not exceed the concurrency limit', async () => {
- const pool = createPool(5)
- let running = 0
-
- const task = () => {
- return new Promise((resolve) => {
- running = running + 1
-
- setTimeout(() => {
- running = running - 1
-
- resolve()
- }, 1000)
- })
- }
-
- Promise.all([
- pool.addTask(task),
- pool.addTask(task),
- pool.addTask(task),
- pool.addTask(task),
- pool.addTask(task),
- pool.addTask(task)
- ])
-
- await delay(100)
-
- expect(running).to.equal(5)
- })
-})