mirror of
https://github.com/titanscouting/tra-analysis.git
synced 2025-09-06 15:07:21 +00:00
push all website files
This commit is contained in:
8
website/node_modules/npm/lib/install/access-error.js
generated
vendored
Normal file
8
website/node_modules/npm/lib/install/access-error.js
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
'use strict'
|
||||
module.exports = function (dir, er) {
|
||||
if (!er) return
|
||||
var accessEr = new Error("EACCES, access '" + dir + "'", -13)
|
||||
accessEr.code = 'EACCES'
|
||||
accessEr.path = dir
|
||||
return accessEr
|
||||
}
|
13
website/node_modules/npm/lib/install/action/build.js
generated
vendored
Normal file
13
website/node_modules/npm/lib/install/action/build.js
generated
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
'use strict'
|
||||
var chain = require('slide').chain
|
||||
var build = require('../../build.js')
|
||||
var npm = require('../../npm.js')
|
||||
var packageId = require('../../utils/package-id.js')
|
||||
|
||||
module.exports = function (staging, pkg, log, next) {
|
||||
log.silly('build', packageId(pkg))
|
||||
chain([
|
||||
[build.linkStuff, pkg.package, pkg.path, npm.config.get('global')],
|
||||
[build.writeBuiltinConf, pkg.package, pkg.path]
|
||||
], next)
|
||||
}
|
18
website/node_modules/npm/lib/install/action/extract-worker.js
generated
vendored
Normal file
18
website/node_modules/npm/lib/install/action/extract-worker.js
generated
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
'use strict'
|
||||
|
||||
const BB = require('bluebird')
|
||||
|
||||
const extract = require('pacote/extract')
|
||||
const npmlog = require('npmlog')
|
||||
|
||||
module.exports = (args, cb) => {
|
||||
const parsed = typeof args === 'string' ? JSON.parse(args) : args
|
||||
const spec = parsed[0]
|
||||
const extractTo = parsed[1]
|
||||
const opts = parsed[2]
|
||||
if (!opts.log) {
|
||||
opts.log = npmlog
|
||||
}
|
||||
opts.log.level = opts.loglevel || opts.log.level
|
||||
BB.resolve(extract(spec, extractTo, opts)).nodeify(cb)
|
||||
}
|
136
website/node_modules/npm/lib/install/action/extract.js
generated
vendored
Normal file
136
website/node_modules/npm/lib/install/action/extract.js
generated
vendored
Normal file
@@ -0,0 +1,136 @@
|
||||
'use strict'

const BB = require('bluebird')

const stat = BB.promisify(require('graceful-fs').stat)
const gentlyRm = BB.promisify(require('../../utils/gently-rm.js'))
const mkdirp = BB.promisify(require('mkdirp'))
const moduleStagingPath = require('../module-staging-path.js')
const move = require('../../utils/move.js')
const npa = require('npm-package-arg')
const npm = require('../../npm.js')
const packageId = require('../../utils/package-id.js')
let pacoteOpts
const path = require('path')
const localWorker = require('./extract-worker.js')
const workerFarm = require('worker-farm')
const isRegistry = require('../../utils/is-registry.js')

const WORKER_PATH = require.resolve('./extract-worker.js')
let workers

// NOTE: temporarily disabled on non-OSX due to ongoing issues:
//
// * Seems to make Windows antivirus issues much more common
// * Messes with Docker (I think)
//
// There are other issues that should be fixed that affect OSX too:
//
// * Logging is messed up right now because pacote does its own thing
// * Global deduplication in pacote breaks due to multiple procs
//
// As these get fixed, we can start experimenting with re-enabling it
// at least on some platforms.
const ENABLE_WORKERS = process.platform === 'darwin'

// Spin up the worker farm (darwin only; no-op elsewhere).
extract.init = () => {
  if (ENABLE_WORKERS) {
    workers = workerFarm({
      maxConcurrentCallsPerWorker: npm.limit.fetch,
      maxRetries: 1
    }, WORKER_PATH)
  }
  return BB.resolve()
}

// Tear the worker farm back down.
extract.teardown = () => {
  if (ENABLE_WORKERS) {
    workerFarm.end(workers)
    workers = null
  }
  return BB.resolve()
}

module.exports = extract

// Unpack pkg's tarball into its staging directory, restage any bundled
// dependencies it shipped with, and strip the leftover node_modules.
function extract (staging, pkg, log) {
  log.silly('extract', packageId(pkg))
  const extractTo = moduleStagingPath(staging, pkg)
  if (!pacoteOpts) {
    // loaded lazily to keep npm startup cost down
    pacoteOpts = require('../../config/pacote')
  }
  const opts = pacoteOpts({
    integrity: pkg.package._integrity,
    resolved: pkg.package._resolved
  })
  const args = [
    pkg.package._requested,
    extractTo,
    opts
  ]
  return BB.fromNode((cb) => {
    let launcher = localWorker
    let msg = args
    const spec = typeof args[0] === 'string' ? npa(args[0]) : args[0]
    args[0] = spec.raw
    if (ENABLE_WORKERS && (isRegistry(spec) || spec.type === 'remote')) {
      // We can't serialize these options
      opts.loglevel = opts.log.level
      opts.log = null
      opts.dirPacker = null
      // workers will run things in parallel!
      launcher = workers
      try {
        msg = JSON.stringify(msg)
      } catch (e) {
        return cb(e)
      }
    }
    launcher(msg, cb)
  }).then(() => {
    if (pkg.package.bundleDependencies || anyBundled(pkg)) {
      return readBundled(pkg, staging, extractTo)
    }
  }).then(() => {
    return gentlyRm(path.join(extractTo, 'node_modules'))
  })
}
|
||||
|
||||
// True when `top` has any transitive child that was unpacked from its
// own bundle (i.e. some descendant's fromBundle points back at `top`).
function anyBundled (top, pkg) {
  if (!pkg) pkg = top
  return pkg.children.some(function (child) {
    return child.fromBundle === top || anyBundled(top, child)
  })
}
|
||||
|
||||
// Stage every bundled child of `pkg`, at most 10 concurrently.
// A child that recorded an extraction error aborts the whole map.
function readBundled (pkg, staging, extractTo) {
  return BB.map(pkg.children, (child) => {
    if (!child.fromBundle) return
    if (child.error) throw child.error
    return stageBundledModule(pkg, child, staging, extractTo)
  }, {concurrency: 10})
}
|
||||
|
||||
// Recursively stage one bundled module: deepest children first, then the
// module itself (finishModule decides whether to move it or discard it).
function stageBundledModule (bundler, child, staging, parentPath) {
  const stageFrom = path.join(parentPath, 'node_modules', child.package.name)
  const stageTo = moduleStagingPath(staging, child)

  return BB.map(child.children, (grandchild) => {
    if (grandchild.error) throw grandchild.error
    return stageBundledModule(bundler, grandchild, staging, stageFrom)
  }).then(() => finishModule(bundler, child, stageTo, stageFrom))
}
|
||||
|
||||
// Move a bundled module into its staging slot when `bundler` owns it;
// otherwise just clear it out of the bundle dir (best effort — a missing
// source is silently ignored).
function finishModule (bundler, child, stageTo, stageFrom) {
  // If we were the ones who bundled this module…
  if (child.fromBundle === bundler) {
    return mkdirp(path.dirname(stageTo)).then(() => move(stageFrom, stageTo))
  }
  return stat(stageFrom).then(() => gentlyRm(stageFrom), () => {})
}
|
16
website/node_modules/npm/lib/install/action/fetch.js
generated
vendored
Normal file
16
website/node_modules/npm/lib/install/action/fetch.js
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
'use strict'

const BB = require('bluebird')

const finished = BB.promisify(require('mississippi').finished)
const packageId = require('../../utils/package-id.js')
const pacote = require('pacote')
const pacoteOpts = require('../../config/pacote')

module.exports = fetch

// Warm the cache: stream the package tarball to completion and signal
// `next` with success or the stream error.
function fetch (staging, pkg, log, next) {
  log.silly('fetch', packageId(pkg))
  const opts = pacoteOpts({integrity: pkg.package._integrity})
  const tarball = pacote.tarball.stream(pkg.package._requested, opts)
  return finished(tarball).then(() => next(), next)
}
|
106
website/node_modules/npm/lib/install/action/finalize.js
generated
vendored
Normal file
106
website/node_modules/npm/lib/install/action/finalize.js
generated
vendored
Normal file
@@ -0,0 +1,106 @@
|
||||
'use strict'
|
||||
const path = require('path')
|
||||
const fs = require('graceful-fs')
|
||||
const Bluebird = require('bluebird')
|
||||
const rimraf = Bluebird.promisify(require('rimraf'))
|
||||
const mkdirp = Bluebird.promisify(require('mkdirp'))
|
||||
const lstat = Bluebird.promisify(fs.lstat)
|
||||
const readdir = Bluebird.promisify(fs.readdir)
|
||||
const symlink = Bluebird.promisify(fs.symlink)
|
||||
const gentlyRm = Bluebird.promisify(require('../../utils/gently-rm'))
|
||||
const moduleStagingPath = require('../module-staging-path.js')
|
||||
const move = require('move-concurrently')
|
||||
const moveOpts = {fs: fs, Promise: Bluebird, maxConcurrency: 4}
|
||||
const getRequested = require('../get-requested.js')
|
||||
const log = require('npmlog')
|
||||
const packageId = require('../../utils/package-id.js')
|
||||
|
||||
module.exports = function (staging, pkg, log) {
|
||||
log.silly('finalize', pkg.realpath)
|
||||
|
||||
const extractedTo = moduleStagingPath(staging, pkg)
|
||||
|
||||
const delpath = path.join(path.dirname(pkg.realpath), '.' + path.basename(pkg.realpath) + '.DELETE')
|
||||
let movedDestAway = false
|
||||
|
||||
const requested = pkg.package._requested || getRequested(pkg)
|
||||
if (requested.type === 'directory') {
|
||||
const relative = path.relative(path.dirname(pkg.path), pkg.realpath)
|
||||
return makeParentPath(pkg.path)
|
||||
.then(() => symlink(relative, pkg.path, 'junction'))
|
||||
.catch((ex) => {
|
||||
return rimraf(pkg.path).then(() => symlink(relative, pkg.path, 'junction'))
|
||||
})
|
||||
} else {
|
||||
return makeParentPath(pkg.realpath)
|
||||
.then(moveStagingToDestination)
|
||||
.then(restoreOldNodeModules)
|
||||
.catch((err) => {
|
||||
if (movedDestAway) {
|
||||
return rimraf(pkg.realpath).then(moveOldDestinationBack).then(() => {
|
||||
throw err
|
||||
})
|
||||
} else {
|
||||
throw err
|
||||
}
|
||||
})
|
||||
.then(() => rimraf(delpath))
|
||||
}
|
||||
|
||||
function makeParentPath (dir) {
|
||||
return mkdirp(path.dirname(dir))
|
||||
}
|
||||
|
||||
function moveStagingToDestination () {
|
||||
return destinationIsClear()
|
||||
.then(actuallyMoveStaging)
|
||||
.catch(() => moveOldDestinationAway().then(actuallyMoveStaging))
|
||||
}
|
||||
|
||||
function destinationIsClear () {
|
||||
return lstat(pkg.realpath).then(() => {
|
||||
throw new Error('destination exists')
|
||||
}, () => {})
|
||||
}
|
||||
|
||||
function actuallyMoveStaging () {
|
||||
return move(extractedTo, pkg.realpath, moveOpts)
|
||||
}
|
||||
|
||||
function moveOldDestinationAway () {
|
||||
return rimraf(delpath).then(() => {
|
||||
return move(pkg.realpath, delpath, moveOpts)
|
||||
}).then(() => { movedDestAway = true })
|
||||
}
|
||||
|
||||
function moveOldDestinationBack () {
|
||||
return move(delpath, pkg.realpath, moveOpts).then(() => { movedDestAway = false })
|
||||
}
|
||||
|
||||
function restoreOldNodeModules () {
|
||||
if (!movedDestAway) return
|
||||
return readdir(path.join(delpath, 'node_modules')).catch(() => []).then((modules) => {
|
||||
if (!modules.length) return
|
||||
return mkdirp(path.join(pkg.realpath, 'node_modules')).then(() => Bluebird.map(modules, (file) => {
|
||||
const from = path.join(delpath, 'node_modules', file)
|
||||
const to = path.join(pkg.realpath, 'node_modules', file)
|
||||
return move(from, to, moveOpts)
|
||||
}))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Undo a finalize by removing the module from its destination.
// Directory-type (symlinked) deps are left alone.
module.exports.rollback = function (top, staging, pkg) {
  return Bluebird.try(() => {
    const requested = pkg.package._requested || getRequested(pkg)
    if (requested && requested.type === 'directory') return Promise.resolve()
    // strictly speaking rolling back a finalize should ONLY remove module that
    // was being finalized, not any of the things under it. But currently
    // those modules are guaranteed to be useless so we may as well remove them too.
    // When/if we separate `commit` step and can rollback to previous versions
    // of upgraded modules then we'll need to revisit this…
    return gentlyRm(pkg.path, false, top).catch((err) => {
      log.warn('rollback', `Rolling back ${packageId(pkg)} failed (this is probably harmless): ${err.message ? err.message : err}`)
    })
  })
}
|
17
website/node_modules/npm/lib/install/action/global-install.js
generated
vendored
Normal file
17
website/node_modules/npm/lib/install/action/global-install.js
generated
vendored
Normal file
@@ -0,0 +1,17 @@
|
||||
'use strict'
|
||||
var path = require('path')
|
||||
var npm = require('../../npm.js')
|
||||
var Installer = require('../../install.js').Installer
|
||||
var packageId = require('../../utils/package-id.js')
|
||||
|
||||
module.exports = function (staging, pkg, log, next) {
|
||||
log.silly('global-install', packageId(pkg))
|
||||
var globalRoot = path.resolve(npm.globalDir, '..')
|
||||
npm.config.set('global', true)
|
||||
var install = new Installer(globalRoot, false, [pkg.package.name + '@' + pkg.package._requested.fetchSpec])
|
||||
install.link = false
|
||||
install.run(function () {
|
||||
npm.config.set('global', false)
|
||||
next.apply(null, arguments)
|
||||
})
|
||||
}
|
8
website/node_modules/npm/lib/install/action/global-link.js
generated
vendored
Normal file
8
website/node_modules/npm/lib/install/action/global-link.js
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
'use strict'
|
||||
var npm = require('../../npm.js')
|
||||
var packageId = require('../../utils/package-id.js')
|
||||
|
||||
module.exports = function (staging, pkg, log, next) {
|
||||
log.silly('global-link', packageId(pkg))
|
||||
npm.link(pkg.package.name, next)
|
||||
}
|
8
website/node_modules/npm/lib/install/action/install.js
generated
vendored
Normal file
8
website/node_modules/npm/lib/install/action/install.js
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
'use strict'
|
||||
var lifecycle = require('../../utils/lifecycle.js')
|
||||
var packageId = require('../../utils/package-id.js')
|
||||
|
||||
module.exports = function (staging, pkg, log, next) {
|
||||
log.silly('install', packageId(pkg))
|
||||
lifecycle(pkg.package, 'install', pkg.path, next)
|
||||
}
|
96
website/node_modules/npm/lib/install/action/move.js
generated
vendored
Normal file
96
website/node_modules/npm/lib/install/action/move.js
generated
vendored
Normal file
@@ -0,0 +1,96 @@
|
||||
'use strict'
|
||||
var fs = require('graceful-fs')
|
||||
var path = require('path')
|
||||
var chain = require('slide').chain
|
||||
var iferr = require('iferr')
|
||||
var rimraf = require('rimraf')
|
||||
var mkdirp = require('mkdirp')
|
||||
var rmStuff = require('../../unbuild.js').rmStuff
|
||||
var lifecycle = require('../../utils/lifecycle.js')
|
||||
var move = require('../../utils/move.js')
|
||||
|
||||
/*
|
||||
Move a module from one point in the node_modules tree to another.
|
||||
Do not disturb either the source or target location's node_modules
|
||||
folders.
|
||||
*/
|
||||
|
||||
module.exports = function (staging, pkg, log, next) {
|
||||
log.silly('move', pkg.fromPath, pkg.path)
|
||||
chain([
|
||||
[lifecycle, pkg.package, 'preuninstall', pkg.fromPath, { failOk: true }],
|
||||
[lifecycle, pkg.package, 'uninstall', pkg.fromPath, { failOk: true }],
|
||||
[rmStuff, pkg.package, pkg.fromPath],
|
||||
[lifecycle, pkg.package, 'postuninstall', pkg.fromPath, { failOk: true }],
|
||||
[moveModuleOnly, pkg.fromPath, pkg.path, log],
|
||||
[lifecycle, pkg.package, 'preinstall', pkg.path, { failOk: true }],
|
||||
[removeEmptyParents, path.resolve(pkg.fromPath, '..')]
|
||||
], next)
|
||||
}
|
||||
|
||||
// Walk upward from pkgdir, rmdir-ing each directory until one refuses
// (it is non-empty); ENOENT means already gone, so keep climbing.
function removeEmptyParents (pkgdir, next) {
  fs.rmdir(pkgdir, function (err) {
    // FIXME: Make sure windows does what we want here
    if (err && err.code !== 'ENOENT') return next()
    removeEmptyParents(path.resolve(pkgdir, '..'), next)
  })
}
|
||||
|
||||
// Physically move a module directory while leaving both endpoints'
// node_modules folders where they are: each is parked under a temporary
// "<dir>.node_modules" sibling for the duration and restored afterward.
// The continuation-passing sequence below is order-sensitive; each
// helper returns the callback for the previous step.
function moveModuleOnly (from, to, log, done) {
  var fromModules = path.join(from, 'node_modules')
  var tempFromModules = from + '.node_modules'
  var toModules = path.join(to, 'node_modules')
  var tempToModules = to + '.node_modules'

  log.silly('move', 'move existing destination node_modules away', toModules)

  move(toModules, tempToModules).then(removeDestination(done), removeDestination(done))

  function removeDestination (next) {
    return function (er) {
      log.silly('move', 'remove existing destination', to)
      if (er) {
        // nothing was parked, so there is nothing to restore afterwards
        rimraf(to, iferr(next, makeDestination(next)))
      } else {
        rimraf(to, iferr(next, makeDestination(iferr(next, moveToModulesBack(next)))))
      }
    }
  }

  function moveToModulesBack (next) {
    return function () {
      log.silly('move', 'move existing destination node_modules back', toModules)
      move(tempToModules, toModules).then(next, done)
    }
  }

  function makeDestination (next) {
    return function () {
      log.silly('move', 'make sure destination parent exists', path.resolve(to, '..'))
      mkdirp(path.resolve(to, '..'), iferr(done, moveNodeModules(next)))
    }
  }

  function moveNodeModules (next) {
    return function () {
      log.silly('move', 'move source node_modules away', fromModules)
      move(fromModules, tempFromModules).then(doMove(moveNodeModulesBack(next)), doMove(next))
    }
  }

  function doMove (next) {
    return function () {
      log.silly('move', 'move module dir to final dest', from, to)
      move(from, to).then(next, done)
    }
  }

  function moveNodeModulesBack (next) {
    return function () {
      mkdirp(from, iferr(done, function () {
        log.silly('move', 'put source node_modules back', fromModules)
        move(tempFromModules, fromModules).then(next, done)
      }))
    }
  }
}
|
8
website/node_modules/npm/lib/install/action/postinstall.js
generated
vendored
Normal file
8
website/node_modules/npm/lib/install/action/postinstall.js
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
'use strict'
|
||||
var lifecycle = require('../../utils/lifecycle.js')
|
||||
var packageId = require('../../utils/package-id.js')
|
||||
|
||||
module.exports = function (staging, pkg, log, next) {
|
||||
log.silly('postinstall', packageId(pkg))
|
||||
lifecycle(pkg.package, 'postinstall', pkg.path, next)
|
||||
}
|
8
website/node_modules/npm/lib/install/action/preinstall.js
generated
vendored
Normal file
8
website/node_modules/npm/lib/install/action/preinstall.js
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
'use strict'
|
||||
var lifecycle = require('../../utils/lifecycle.js')
|
||||
var packageId = require('../../utils/package-id.js')
|
||||
|
||||
module.exports = function (staging, pkg, log, next) {
|
||||
log.silly('preinstall', packageId(pkg))
|
||||
lifecycle(pkg.package, 'preinstall', pkg.path, next)
|
||||
}
|
27
website/node_modules/npm/lib/install/action/prepare.js
generated
vendored
Normal file
27
website/node_modules/npm/lib/install/action/prepare.js
generated
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
'use strict'
|
||||
var chain = require('slide').chain
|
||||
var lifecycle = require('../../utils/lifecycle.js')
|
||||
var packageId = require('../../utils/package-id.js')
|
||||
var prepublishWarning = require('../../utils/warn-deprecated.js')('prepublish-on-install')
|
||||
var moduleStagingPath = require('../module-staging-path.js')
|
||||
|
||||
module.exports = function (staging, pkg, log, next) {
|
||||
log.silly('prepublish', packageId(pkg))
|
||||
// TODO: for `npm@5`, change the behavior and remove this warning.
|
||||
// see https://github.com/npm/npm/issues/10074 for details
|
||||
if (pkg.package && pkg.package.scripts && pkg.package.scripts.prepublish) {
|
||||
prepublishWarning([
|
||||
'As of npm@5, `prepublish` scripts are deprecated.',
|
||||
'Use `prepare` for build steps and `prepublishOnly` for upload-only.',
|
||||
'See the deprecation note in `npm help scripts` for more information.'
|
||||
])
|
||||
}
|
||||
var buildpath = moduleStagingPath(staging, pkg)
|
||||
chain(
|
||||
[
|
||||
[lifecycle, pkg.package, 'prepublish', buildpath],
|
||||
[lifecycle, pkg.package, 'prepare', buildpath]
|
||||
],
|
||||
next
|
||||
)
|
||||
}
|
45
website/node_modules/npm/lib/install/action/refresh-package-json.js
generated
vendored
Normal file
45
website/node_modules/npm/lib/install/action/refresh-package-json.js
generated
vendored
Normal file
@@ -0,0 +1,45 @@
|
||||
'use strict'
|
||||
|
||||
const Bluebird = require('bluebird')
|
||||
|
||||
const checkPlatform = Bluebird.promisify(require('npm-install-checks').checkPlatform)
|
||||
const getRequested = require('../get-requested.js')
|
||||
const npm = require('../../npm.js')
|
||||
const path = require('path')
|
||||
const readJson = Bluebird.promisify(require('read-package-json'))
|
||||
const updatePackageJson = Bluebird.promisify(require('../update-package-json'))
|
||||
|
||||
module.exports = function (staging, pkg, log) {
|
||||
log.silly('refresh-package-json', pkg.realpath)
|
||||
|
||||
return readJson(path.join(pkg.path, 'package.json'), false).then((metadata) => {
|
||||
Object.keys(pkg.package).forEach(function (key) {
|
||||
if (key !== 'version' && key !== 'dependencies' && !isEmpty(pkg.package[key])) {
|
||||
metadata[key] = pkg.package[key]
|
||||
}
|
||||
})
|
||||
if (metadata._resolved == null && pkg.fakeChild) {
|
||||
metadata._resolved = pkg.fakeChild.resolved
|
||||
}
|
||||
// These two sneak in and it's awful
|
||||
delete metadata.readme
|
||||
delete metadata.readmeFilename
|
||||
|
||||
pkg.package = metadata
|
||||
pkg.fakeChild = false
|
||||
}).catch(() => 'ignore').then(() => {
|
||||
return checkPlatform(pkg.package, npm.config.get('force'))
|
||||
}).then(() => {
|
||||
const requested = pkg.package._requested || getRequested(pkg)
|
||||
if (requested.type !== 'directory') {
|
||||
return updatePackageJson(pkg, pkg.path)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// A value is "empty" when it is null/undefined, a zero-length array, or
// an object with no own enumerable keys. Other primitives (including 0
// and '') are never empty.
function isEmpty (value) {
  if (value == null) return true
  if (Array.isArray(value)) return value.length === 0
  if (typeof value === 'object') return Object.keys(value).length === 0
  return false
}
|
85
website/node_modules/npm/lib/install/action/remove.js
generated
vendored
Normal file
85
website/node_modules/npm/lib/install/action/remove.js
generated
vendored
Normal file
@@ -0,0 +1,85 @@
|
||||
'use strict'
|
||||
var path = require('path')
|
||||
var fs = require('graceful-fs')
|
||||
var rimraf = require('rimraf')
|
||||
var asyncMap = require('slide').asyncMap
|
||||
var mkdirp = require('mkdirp')
|
||||
var npm = require('../../npm.js')
|
||||
var andIgnoreErrors = require('../and-ignore-errors.js')
|
||||
var move = require('../../utils/move.js')
|
||||
var isInside = require('path-is-inside')
|
||||
var vacuum = require('fs-vacuum')
|
||||
|
||||
// This is weird because we want to remove the module but not it's node_modules folder
|
||||
// allowing for this allows us to not worry about the order of operations
|
||||
module.exports = function (staging, pkg, log, next) {
|
||||
log.silly('remove', pkg.path)
|
||||
if (pkg.target) {
|
||||
removeLink(pkg, next)
|
||||
} else {
|
||||
removeDir(pkg, log, next)
|
||||
}
|
||||
}
|
||||
|
||||
// Remove a symlinked package, then vacuum away any now-empty parent
// directories — stopping at the npm prefix when the link lives inside it.
function removeLink (pkg, next) {
  var base = isInside(pkg.path, npm.prefix) ? npm.prefix : pkg.path
  rimraf(pkg.path, (err) => {
    if (err) return next(err)
    vacuum(pkg.path, {base: base}, next)
  })
}
|
||||
|
||||
// Remove a package directory while preserving its node_modules: the
// node_modules folder is parked under ".<name>.MODULES", the package is
// removed, and the parked modules are moved back into a fresh
// node_modules under the (now otherwise empty) package path.
function removeDir (pkg, log, next) {
  var modpath = path.join(path.dirname(pkg.path), '.' + path.basename(pkg.path) + '.MODULES')

  move(path.join(pkg.path, 'node_modules'), modpath).then(unbuildPackage, unbuildPackage)

  function unbuildPackage (moveEr) {
    // if parking node_modules failed there is nothing to move back
    rimraf(pkg.path, moveEr ? andRemoveEmptyParents(pkg.path) : moveModulesBack)
  }

  function andRemoveEmptyParents (path) {
    return function (er) {
      if (er) return next(er)
      removeEmptyParents(pkg.path)
    }
  }

  function moveModulesBack () {
    fs.readdir(modpath, makeTarget)
  }

  function makeTarget (readdirEr, files) {
    if (readdirEr) return cleanup()
    if (!files.length) return cleanup()
    mkdirp(path.join(pkg.path, 'node_modules'), function (mkdirEr) { moveModules(mkdirEr, files) })
  }

  function moveModules (mkdirEr, files) {
    if (mkdirEr) return next(mkdirEr)
    asyncMap(files, function (file, done) {
      var from = path.join(modpath, file)
      var to = path.join(pkg.path, 'node_modules', file)
      // we ignore errors here, because they can legitimately happen, for instance,
      // bundled modules will be in both node_modules folders
      move(from, to).then(andIgnoreErrors(done), andIgnoreErrors(done))
    }, cleanup)
  }

  function cleanup () {
    rimraf(modpath, afterCleanup)
  }

  function afterCleanup (rimrafEr) {
    if (rimrafEr) log.warn('remove', rimrafEr)
    removeEmptyParents(path.resolve(pkg.path, '..'))
  }

  function removeEmptyParents (pkgdir) {
    fs.rmdir(pkgdir, function (er) {
      // FIXME: Make sure windows does what we want here
      if (er && er.code !== 'ENOENT') return next()
      removeEmptyParents(path.resolve(pkgdir, '..'))
    })
  }
}
|
16
website/node_modules/npm/lib/install/action/unbuild.js
generated
vendored
Normal file
16
website/node_modules/npm/lib/install/action/unbuild.js
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
'use strict'
|
||||
var Bluebird = require('bluebird')
|
||||
var lifecycle = Bluebird.promisify(require('../../utils/lifecycle.js'))
|
||||
var packageId = require('../../utils/package-id.js')
|
||||
var rmStuff = Bluebird.promisify(require('../../unbuild.js').rmStuff)
|
||||
|
||||
module.exports = function (staging, pkg, log) {
|
||||
log.silly('unbuild', packageId(pkg))
|
||||
return lifecycle(pkg.package, 'preuninstall', pkg.path, { failOk: true }).then(() => {
|
||||
return lifecycle(pkg.package, 'uninstall', pkg.path, { failOk: true })
|
||||
}).then(() => {
|
||||
return rmStuff(pkg.package, pkg.path)
|
||||
}).then(() => {
|
||||
return lifecycle(pkg.package, 'postuninstall', pkg.path, { failOk: true })
|
||||
})
|
||||
}
|
192
website/node_modules/npm/lib/install/actions.js
generated
vendored
Normal file
192
website/node_modules/npm/lib/install/actions.js
generated
vendored
Normal file
@@ -0,0 +1,192 @@
|
||||
'use strict'
|
||||
|
||||
const BB = require('bluebird')
|
||||
|
||||
const andAddParentToErrors = require('./and-add-parent-to-errors.js')
|
||||
const failedDependency = require('./deps.js').failedDependency
|
||||
const isInstallable = BB.promisify(require('./validate-args.js').isInstallable)
|
||||
const moduleName = require('../utils/module-name.js')
|
||||
const npm = require('../npm.js')
|
||||
const reportOptionalFailure = require('./report-optional-failure.js')
|
||||
const validate = require('aproba')
|
||||
|
||||
const actions = {}

actions.fetch = require('./action/fetch.js')
actions.extract = require('./action/extract.js')
actions.build = require('./action/build.js')
actions.preinstall = require('./action/preinstall.js')
actions.install = require('./action/install.js')
actions.postinstall = require('./action/postinstall.js')
actions.prepare = require('./action/prepare.js')
actions.finalize = require('./action/finalize.js')
actions.remove = require('./action/remove.js')
actions.unbuild = require('./action/unbuild.js')
actions.move = require('./action/move.js')
actions['global-install'] = require('./action/global-install.js')
actions['global-link'] = require('./action/global-link.js')
actions['refresh-package-json'] = require('./action/refresh-package-json.js')

// FIXME: We wrap actions like three ways to sunday here.
// Rewrite this to only work one way.

// Wrap every action so that it validates its arguments, skips packages
// that already failed, registers rollback/commit hooks, checks the
// package is installable exactly once, and routes failures through the
// optional-dependency error handler.
Object.keys(actions).forEach(function (actionName) {
  var action = actions[actionName]
  actions[actionName] = (staging, pkg, log) => {
    validate('SOO', [staging, pkg, log])
    // refuse to run actions for failed packages
    if (pkg.failed) return BB.resolve()
    if (action.rollback) {
      if (!pkg.rollback) pkg.rollback = []
      pkg.rollback.unshift(action.rollback)
    }
    if (action.commit) {
      if (!pkg.commit) pkg.commit = []
      pkg.commit.push(action.commit)
    }

    let actionP
    if (pkg.knownInstallable) {
      actionP = runAction(action, staging, pkg, log)
    } else {
      actionP = isInstallable(pkg.package).then(() => {
        pkg.knownInstallable = true
        return runAction(action, staging, pkg, log)
      })
    }

    return actionP.then(() => {
      log.finish()
    }, (err) => {
      // tag the error with the parent package, then decide whether the
      // failure is fatal or just an optional-dep problem
      return BB.fromNode((cb) => {
        andAddParentToErrors(pkg.parent, cb)(err)
      }).catch((err) => {
        return handleOptionalDepErrors(pkg, err)
      })
    })
  }
  actions[actionName].init = action.init || (() => BB.resolve())
  actions[actionName].teardown = action.teardown || (() => BB.resolve())
})
exports.actions = actions
|
||||
|
||||
// Adapt an action that may be callback-based OR promise-returning into a
// single promise: the callback is passed through, and a returned
// thenable is also wired to it.
function runAction (action, staging, pkg, log) {
  return BB.fromNode((cb) => {
    const result = action(staging, pkg, log, cb)
    if (result && result.then) {
      result.then(() => cb(), cb)
    }
  })
}
|
||||
|
||||
// Mark pkg failed, then transitively fail any dependency that is now
// orphaned: every package requiring it has failed and the user did not
// ask for it directly.
function markAsFailed (pkg) {
  if (pkg.failed) return
  pkg.failed = true
  pkg.requires.forEach((req) => {
    const stillWanted = req.requiredBy.some((reqReqBy) => !reqReqBy.failed)
    if (!stillWanted && !req.userRequired) {
      markAsFailed(req)
    }
  })
}
|
||||
|
||||
// A package's action failed: fail it (and its orphaned deps), then only
// rethrow when some non-optional dependent is affected; otherwise report
// it as a harmless optional-dependency failure.
function handleOptionalDepErrors (pkg, err) {
  markAsFailed(pkg)
  const anyFatal = failedDependency(pkg)
  if (anyFatal) throw err
  reportOptionalFailure(pkg, null, err)
}
|
||||
|
||||
exports.doOne = doOne
// Run a single decomposed command for one package, bracketed by the
// action's init/teardown hooks.
function doOne (cmd, staging, pkg, log, next) {
  validate('SSOOF', arguments)
  const prepped = prepareAction([cmd, pkg], staging, log)
  return withInit(actions[cmd], () => execAction(prepped)).nodeify(next)
}
|
||||
|
||||
exports.doParallel = doParallel
// Run every queued action matching `type` concurrently, bounded by
// npm.limit.action, with timing marks around the batch.
function doParallel (type, staging, actionsToRun, log, next) {
  validate('SSAOF', arguments)
  const acts = []
  for (const todo of actionsToRun) {
    if (todo[0] === type) {
      acts.push(prepareAction(todo, staging, log))
    }
  }
  log.silly('doParallel', type + ' ' + acts.length)
  time(log)
  if (!acts.length) { return next() }
  return withInit(actions[type], () => {
    return BB.map(acts, execAction, {
      concurrency: npm.limit.action
    })
  }).nodeify((err) => {
    log.finish()
    timeEnd(log)
    next(err)
  })
}
|
||||
|
||||
exports.doSerial = doSerial
// Run queued actions of `type` one at a time, in queue order.
function doSerial (type, staging, actionsToRun, log, next) {
  validate('SSAOF', arguments)
  log.silly('doSerial', '%s %d', type, actionsToRun.length)
  runSerial(type, staging, actionsToRun, log, next)
}
|
||||
|
||||
exports.doReverseSerial = doReverseSerial
// Like doSerial, but walks the queue from the end (used for teardown-ish
// phases); the input array is copied before reversing.
function doReverseSerial (type, staging, actionsToRun, log, next) {
  validate('SSAOF', arguments)
  log.silly('doReverseSerial', '%s %d', type, actionsToRun.length)
  runSerial(type, staging, [].concat(actionsToRun).reverse(), log, next)
}
|
||||
|
||||
// Shared driver for doSerial/doReverseSerial: filter the queue down to
// `type`, then execute each prepared action sequentially.
function runSerial (type, staging, actionsToRun, log, next) {
  const acts = []
  for (const todo of actionsToRun) {
    if (todo[0] === type) {
      acts.push(prepareAction(todo, staging, log))
    }
  }
  time(log)
  if (!acts.length) { return next() }
  return withInit(actions[type], () => {
    return BB.each(acts, execAction)
  }).nodeify((err) => {
    log.finish()
    timeEnd(log)
    next(err)
  })
}
|
||||
|
||||
// Open a process-level timing mark for this action group.
function time (log) {
  process.emit('time', `action:${log.name}`)
}
// Close the timing mark opened by `time`.
function timeEnd (log) {
  process.emit('timeEnd', `action:${log.name}`)
}
|
||||
|
||||
// Run `body` bracketed by the action's init()/teardown() lifecycle; the
// bluebird disposer guarantees teardown runs even when `body` rejects.
function withInit (action, body) {
  const lifecycle = action.init().disposer(() => action.teardown())
  return BB.using(lifecycle, body)
}
|
||||
|
||||
// Turn a decomposed [cmd, pkg] pair into an executable tuple of
// [handler, staging, pkg, tracker]; throws on unknown commands.
function prepareAction (action, staging, log) {
  validate('ASO', arguments)
  validate('SO', action)
  const cmd = action[0]
  const pkg = action[1]
  const handler = actions[cmd]
  if (!handler) throw new Error('Unknown decomposed command "' + cmd + '" (is it new?)')
  return [handler, staging, pkg, log.newGroup(cmd + ':' + moduleName(pkg))]
}
|
||||
|
||||
// Invoke a prepared action tuple: the first element is the handler, the
// remaining elements are its arguments.
function execAction (todo) {
  const [handler, ...args] = todo
  return handler.apply(null, args)
}
|
13
website/node_modules/npm/lib/install/and-add-parent-to-errors.js
generated
vendored
Normal file
13
website/node_modules/npm/lib/install/and-add-parent-to-errors.js
generated
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
'use strict'
|
||||
var validate = require('aproba')
|
||||
|
||||
module.exports = function (parent, cb) {
|
||||
validate('F', [cb])
|
||||
return function (er) {
|
||||
if (!er) return cb.apply(null, arguments)
|
||||
if (er instanceof Error && parent && parent.package && parent.package.name) {
|
||||
er.parent = parent.package.name
|
||||
}
|
||||
cb(er)
|
||||
}
|
||||
}
|
16
website/node_modules/npm/lib/install/and-finish-tracker.js
generated
vendored
Normal file
16
website/node_modules/npm/lib/install/and-finish-tracker.js
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
'use strict'
|
||||
var validate = require('aproba')
|
||||
|
||||
module.exports = function (tracker, cb) {
|
||||
validate('OF', [tracker, cb])
|
||||
return function () {
|
||||
tracker.finish()
|
||||
cb.apply(null, arguments)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports.now = function (tracker, cb) {
|
||||
validate('OF', [tracker, cb])
|
||||
tracker.finish()
|
||||
cb.apply(null, Array.prototype.slice.call(arguments, 2))
|
||||
}
|
9
website/node_modules/npm/lib/install/and-ignore-errors.js
generated
vendored
Normal file
9
website/node_modules/npm/lib/install/and-ignore-errors.js
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
'use strict'
|
||||
|
||||
module.exports = function (cb) {
|
||||
return function () {
|
||||
var args = Array.prototype.slice.call(arguments, 1)
|
||||
if (args.length) args.unshift(null)
|
||||
return cb.apply(null, args)
|
||||
}
|
||||
}
|
282
website/node_modules/npm/lib/install/audit.js
generated
vendored
Normal file
282
website/node_modules/npm/lib/install/audit.js
generated
vendored
Normal file
@@ -0,0 +1,282 @@
|
||||
'use strict'
|
||||
exports.generate = generate
|
||||
exports.generateFromInstall = generateFromInstall
|
||||
exports.submitForInstallReport = submitForInstallReport
|
||||
exports.submitForFullReport = submitForFullReport
|
||||
exports.printInstallReport = printInstallReport
|
||||
exports.printParseableReport = printParseableReport
|
||||
exports.printFullReport = printFullReport
|
||||
|
||||
const Bluebird = require('bluebird')
|
||||
const auditReport = require('npm-audit-report')
|
||||
const treeToShrinkwrap = require('../shrinkwrap.js').treeToShrinkwrap
|
||||
const packageId = require('../utils/package-id.js')
|
||||
const output = require('../utils/output.js')
|
||||
const npm = require('../npm.js')
|
||||
const qw = require('qw')
|
||||
const registryFetch = require('npm-registry-fetch')
|
||||
const zlib = require('zlib')
|
||||
const gzip = Bluebird.promisify(zlib.gzip)
|
||||
const log = require('npmlog')
|
||||
const perf = require('../utils/perf.js')
|
||||
const url = require('url')
|
||||
const npa = require('npm-package-arg')
|
||||
const uuid = require('uuid')
|
||||
const ssri = require('ssri')
|
||||
const cloneDeep = require('lodash.clonedeep')
|
||||
const pacoteOpts = require('../config/pacote.js')
|
||||
|
||||
// used when scrubbing module names/specifiers
|
||||
const runId = uuid.v4()
|
||||
|
||||
// Submit the gzipped audit payload to the registry's "quick" audit
// endpoint and resolve with the parsed JSON report.
//
// Any configured scoped registries are also pinged fire-and-forget so
// they can observe the audit event; their responses are discarded.
//
// Fix: the payload-size log line was missing the space before "bytes"
// ("of 1234bytes"), inconsistent with submitForFullReport.
function submitForInstallReport (auditData) {
  const cfg = npm.config // avoid the no-dynamic-lookups test
  const scopedRegistries = cfg.keys.filter(_ => /:registry$/.test(_)).map(_ => cfg.get(_))
  perf.emit('time', 'audit compress')
  // TODO: registryFetch will be adding native support for `Content-Encoding: gzip` at which point
  // we'll pass in something like `gzip: true` and not need to JSON stringify, gzip or headers.
  return gzip(JSON.stringify(auditData)).then(body => {
    perf.emit('timeEnd', 'audit compress')
    log.info('audit', 'Submitting payload of ' + body.length + ' bytes')
    scopedRegistries.forEach(reg => {
      // we don't care about the response so destroy the stream if we can, or leave it flowing
      // so it can eventually finish and clean up after itself
      fetchAudit(url.resolve(reg, '/-/npm/v1/security/audits/quick'))
        .then(_ => {
          _.body.on('error', () => {})
          if (_.body.destroy) {
            _.body.destroy()
          } else {
            _.body.resume()
          }
        }, _ => {})
    })
    perf.emit('time', 'audit submit')
    return fetchAudit('/-/npm/v1/security/audits/quick', body).then(response => {
      perf.emit('timeEnd', 'audit submit')
      perf.emit('time', 'audit body')
      return response.json()
    }).then(result => {
      perf.emit('timeEnd', 'audit body')
      return result
    })
  })
}
|
||||
|
||||
// Submit the gzipped audit payload to the full audit endpoint, resolve
// with the parsed report, and tag it with this process's run id.
function submitForFullReport (auditData) {
  perf.emit('time', 'audit compress')
  // TODO: registryFetch will be adding native support for `Content-Encoding: gzip` at which point
  // we'll pass in something like `gzip: true` and not need to JSON stringify, gzip or headers.
  return gzip(JSON.stringify(auditData)).then(body => {
    perf.emit('timeEnd', 'audit compress')
    log.info('audit', 'Submitting payload of ' + body.length + ' bytes')
    perf.emit('time', 'audit submit')
    return fetchAudit('/-/npm/v1/security/audits', body)
  }).then(response => {
    perf.emit('timeEnd', 'audit submit')
    perf.emit('time', 'audit body')
    return response.json()
  }).then(result => {
    perf.emit('timeEnd', 'audit body')
    result.runId = runId
    return result
  })
}
|
||||
|
||||
// POST a gzipped JSON `body` to the given audit endpoint using the
// project's registry configuration and session.
function fetchAudit (href, body) {
  const opts = pacoteOpts()
  const headers = { 'content-encoding': 'gzip', 'content-type': 'application/json' }
  return registryFetch(href, {
    method: 'POST',
    headers,
    config: npm.config,
    npmSession: opts.npmSession,
    projectScope: npm.projectScope,
    log: log,
    body: body
  })
}
|
||||
|
||||
// Render the short post-install audit summary to standard output.
function printInstallReport (auditResult) {
  const opts = {
    reporter: 'install',
    withColor: npm.color,
    withUnicode: npm.config.get('unicode')
  }
  return auditReport(auditResult, opts).then(result => output(result.report))
}

// Render the full audit report: JSON when --json is set, otherwise the
// human-readable "detail" format.
function printFullReport (auditResult) {
  const opts = {
    log: output,
    reporter: npm.config.get('json') ? 'json' : 'detail',
    withColor: npm.color,
    withUnicode: npm.config.get('unicode')
  }
  return auditReport(auditResult, opts).then(result => output(result.report))
}

// Render the audit report in the machine-friendly "parseable" format.
function printParseableReport (auditResult) {
  const opts = {
    log: output,
    reporter: 'parseable',
    withColor: npm.color,
    withUnicode: npm.config.get('unicode')
  }
  return auditReport(auditResult, opts).then(result => output(result.report))
}
|
||||
|
||||
// Build the audit payload from a shrinkwrap tree: scrub private names and
// specifiers, attach the scrubbed install/remove arguments, and add
// environment metadata before resolving with the finished payload.
function generate (shrinkwrap, requires, diffs, install, remove) {
  const payload = cloneDeep(shrinkwrap)
  delete payload.lockfileVersion
  payload.requires = scrubRequires(requires)
  scrubDeps(payload.dependencies)

  // payload.diffs = diffs || {}
  payload.install = (install || []).map(scrubArg)
  payload.remove = (remove || []).map(scrubArg)
  return generateMetadata().then((md) => {
    payload.metadata = md
    return payload
  })
}
|
||||
|
||||
// Dependency fields whose values get scrubbed (hashed) when private.
const scrubKeys = qw`version`
// Dependency fields dropped entirely from the audit payload.
const deleteKeys = qw`from resolved`

// Recursively anonymize a shrinkwrap `dependencies` map in place: package
// names in private scopes and non-registry specifiers are replaced with
// run-scoped hashes so they cannot leak to the registry.
function scrubDeps (deps) {
  if (!deps) return
  // first pass: rename keys that must be hidden (either the name itself
  // or its version specifier would reveal private information)
  Object.keys(deps).forEach(name => {
    if (!shouldScrubName(name) && !shouldScrubSpec(name, deps[name].version)) return
    const value = deps[name]
    delete deps[name]
    deps[scrub(name)] = value
  })
  // second pass: scrub/drop per-entry fields and recurse into nested
  // requires/dependencies maps
  Object.keys(deps).forEach(name => {
    for (let toScrub of scrubKeys) {
      if (!deps[name][toScrub]) continue
      deps[name][toScrub] = scrubSpec(name, deps[name][toScrub])
    }
    for (let toDelete of deleteKeys) delete deps[name][toDelete]

    scrubRequires(deps[name].requires)
    scrubDeps(deps[name].dependencies)
  })
}
|
||||
|
||||
// Anonymize a `requires` map in place, hashing names/specifiers that
// would reveal private information; returns the same (mutated) map.
function scrubRequires (reqs) {
  if (!reqs) return reqs
  for (const name of Object.keys(reqs)) {
    const spec = reqs[name]
    const hideName = shouldScrubName(name) || shouldScrubSpec(name, spec)
    if (hideName) delete reqs[name]
    reqs[hideName ? scrub(name) : name] = scrubSpec(name, spec)
  }
  return reqs
}
|
||||
|
||||
// Extract the scope portion of a package name ('@scope/pkg' -> '@scope');
// returns undefined for unscoped names.
function getScope (name) {
  return name[0] === '@' ? name.slice(0, name.indexOf('/')) : undefined
}

// A name is scrubbed when it belongs to a scope that has its own
// configured (likely private) registry.
function shouldScrubName (name) {
  const scope = getScope(name)
  if (!scope) return false
  const cfg = npm.config // avoid the no-dynamic-lookups test
  return Boolean(cfg.get(scope + ':registry'))
}
// A specifier is scrubbed when it does not resolve to a registry target
// (git, file, directory, remote tarball, ...).
function shouldScrubSpec (name, spec) {
  return !npa.resolve(name, spec).registry
}
|
||||
|
||||
// Anonymize a user-supplied install/remove argument ('name@spec'),
// hashing whichever parts would reveal private information.
function scrubArg (arg) {
  const req = npa(arg)
  const hide = shouldScrubName(req.name) || shouldScrubSpec(req.name, req.rawSpec)
  const name = hide ? scrubName(req.name) : req.name
  return name + '@' + scrubSpec(req.name, req.rawSpec)
}

// Hash a package name, but only when it belongs to a private scope.
function scrubName (name) {
  if (shouldScrubName(name)) return scrub(name)
  return name
}
|
||||
|
||||
// Anonymize a specifier that doesn't target a public registry, keeping a
// prefix that preserves its general kind (git/remote/directory/file).
function scrubSpec (name, spec) {
  const req = npa.resolve(name, spec)
  if (req.registry) return spec
  const hashed = scrub(spec)
  switch (req.type) {
    case 'git':
      return 'git+ssh://' + hashed
    case 'remote':
      return 'https://' + hashed
    case 'directory':
      return 'file:' + hashed
    case 'file':
      return 'file:' + hashed + '.tar'
    default:
      return hashed
  }
}
|
||||
|
||||
module.exports.scrub = scrub
// Deterministically hash `value`, salted with the run id (or an explicit
// `rid`) so values correlate within one submission but not across runs.
function scrub (value, rid) {
  const salted = (rid || runId) + ' ' + value
  return ssri.fromData(salted, {algorithms: ['sha256']}).hexDigest()
}
|
||||
|
||||
// Collect environment details (npm/node versions, platform, NODE_ENV) to
// attach to the audit payload; resolves with the metadata object.
function generateMetadata () {
  return Promise.resolve({
    npm_version: npm.version,
    node_version: process.version,
    platform: process.platform,
    node_env: process.env.NODE_ENV
  })
}
|
||||
/*
|
||||
const head = path.resolve(npm.prefix, '.git/HEAD')
|
||||
return readFile(head, 'utf8').then((head) => {
|
||||
if (!head.match(/^ref: /)) {
|
||||
meta.commit_hash = head.trim()
|
||||
return
|
||||
}
|
||||
const headFile = head.replace(/^ref: /, '').trim()
|
||||
meta.branch = headFile.replace(/^refs[/]heads[/]/, '')
|
||||
return readFile(path.resolve(npm.prefix, '.git', headFile), 'utf8')
|
||||
}).then((commitHash) => {
|
||||
meta.commit_hash = commitHash.trim()
|
||||
const proc = spawn('git', qw`diff --quiet --exit-code package.json package-lock.json`, {cwd: npm.prefix, stdio: 'ignore'})
|
||||
return new Promise((resolve, reject) => {
|
||||
proc.once('error', reject)
|
||||
proc.on('exit', (code, signal) => {
|
||||
if (signal == null) meta.state = code === 0 ? 'clean' : 'dirty'
|
||||
resolve()
|
||||
})
|
||||
})
|
||||
}).then(() => meta, () => meta)
|
||||
*/
|
||||
|
||||
// Translate an installer tree plus its action lists into the audit
// payload shape consumed by `generate`.
function generateFromInstall (tree, diffs, install, remove) {
  const requires = {}
  tree.requires.forEach((pkg) => {
    const name = pkg.package.name
    requires[name] = tree.package.dependencies[name] ||
      tree.package.devDependencies[name] ||
      pkg.package.version
  })

  const auditInstall = (install || []).filter((a) => a.name).map(packageId)
  const auditRemove = (remove || []).filter((a) => a.name).map(packageId)
  const auditDiffs = {}
  diffs.forEach((action) => {
    const mutation = action[0]
    const child = action[1]
    if (mutation !== 'add' && mutation !== 'update' && mutation !== 'remove') return
    if (!auditDiffs[mutation]) auditDiffs[mutation] = []
    if (mutation === 'add') {
      auditDiffs[mutation].push({location: child.location})
    } else if (mutation === 'update') {
      auditDiffs[mutation].push({location: child.location, previous: packageId(child.oldPkg)})
    } else {
      auditDiffs[mutation].push({previous: packageId(child)})
    }
  })

  return generate(treeToShrinkwrap(tree), requires, auditDiffs, auditInstall, auditRemove)
}
|
69
website/node_modules/npm/lib/install/check-permissions.js
generated
vendored
Normal file
69
website/node_modules/npm/lib/install/check-permissions.js
generated
vendored
Normal file
@@ -0,0 +1,69 @@
|
||||
'use strict'
|
||||
var path = require('path')
|
||||
var log = require('npmlog')
|
||||
var validate = require('aproba')
|
||||
var uniq = require('lodash.uniq')
|
||||
var asyncMap = require('slide').asyncMap
|
||||
var npm = require('../npm.js')
|
||||
var exists = require('./exists.js')
|
||||
var writable = require('./writable.js')
|
||||
|
||||
// Verify we have filesystem write access for every planned action before
// mutating node_modules. Failures are collected and warned about; with
// --force the install proceeds anyway, otherwise the first error is
// passed to `next`.
module.exports = function (actions, next) {
  validate('AF', arguments)
  var errors = []
  asyncMap(actions, function (action, done) {
    var cmd = action[0]
    var pkg = action[1]
    switch (cmd) {
      case 'add':
        // the target dir may not exist yet -- walk up to the nearest
        // existing ancestor and check write access there
        hasAnyWriteAccess(path.resolve(pkg.path, '..'), errors, done)
        break
      case 'update':
      case 'remove':
        // need write access to both the module dir and its parent
        hasWriteAccess(pkg.path, errors, andHasWriteAccess(path.resolve(pkg.path, '..'), errors, done))
        break
      case 'move':
        // destination (or its nearest existing ancestor) plus the
        // source's parent directory
        hasAnyWriteAccess(pkg.path, errors, andHasWriteAccess(path.resolve(pkg.fromPath, '..'), errors, done))
        break
      default:
        // other action kinds don't touch the filesystem here
        done()
    }
  }, function () {
    if (!errors.length) return next()
    // de-duplicate paths before warning
    uniq(errors.map(function (er) { return 'Missing write access to ' + er.path })).forEach(function (er) {
      log.warn('checkPermissions', er)
    })
    npm.config.get('force') ? next() : next(errors[0])
  })
}
|
||||
|
||||
// Continuation helper: returns a callback that checks `dir` for write
// access and then invokes `done`.
function andHasWriteAccess (dir, errors, done) {
  validate('SAF', arguments)
  return function () {
    hasWriteAccess(dir, errors, done)
  }
}

// Walk upward from `dir` to the nearest directory that actually exists
// (or the filesystem root) and check write access there. Used for 'add',
// where the target directory usually does not exist yet.
function hasAnyWriteAccess (dir, errors, done) {
  validate('SAF', arguments)
  findNearestDir()
  function findNearestDir () {
    var nextDir = path.resolve(dir, '..')
    exists(dir, function (dirDoesntExist) {
      // stop when `dir` exists, or when we've hit the root (a directory
      // whose parent resolves to itself)
      if (!dirDoesntExist || nextDir === dir) {
        return hasWriteAccess(dir, errors, done)
      } else {
        dir = nextDir
        findNearestDir()
      }
    })
  }
}
|
||||
|
||||
// Check that `dir` is writable; any failure is collected into `errors`
// before completion is signalled via `done`.
function hasWriteAccess (dir, errors, done) {
  validate('SAF', arguments)
  writable(dir, function (er) {
    if (er) {
      errors.push(er)
    }
    done()
  })
}
|
30
website/node_modules/npm/lib/install/copy-tree.js
generated
vendored
Normal file
30
website/node_modules/npm/lib/install/copy-tree.js
generated
vendored
Normal file
@@ -0,0 +1,30 @@
|
||||
'use strict'
|
||||
var createNode = require('./node.js').create
|
||||
module.exports = function (tree) {
|
||||
return copyTree(tree, {})
|
||||
}
|
||||
|
||||
function copyTree (tree, cache) {
|
||||
if (cache[tree.path]) { return cache[tree.path] }
|
||||
var newTree = cache[tree.path] = createNode(Object.assign({}, tree))
|
||||
copyModuleList(newTree, 'children', cache)
|
||||
newTree.children.forEach(function (child) {
|
||||
child.parent = newTree
|
||||
})
|
||||
copyModuleList(newTree, 'requires', cache)
|
||||
copyModuleList(newTree, 'requiredBy', cache)
|
||||
return newTree
|
||||
}
|
||||
|
||||
// Replace tree[key] with copies of each listed node, dropping any entry
// whose copy is falsy; a missing list becomes an empty array.
function copyModuleList (tree, key, cache) {
  const source = tree[key] || []
  const copies = []
  for (const child of source) {
    const copy = copyTree(child, cache)
    if (copy) copies.push(copy)
  }
  tree[key] = copies
}
|
79
website/node_modules/npm/lib/install/decompose-actions.js
generated
vendored
Normal file
79
website/node_modules/npm/lib/install/decompose-actions.js
generated
vendored
Normal file
@@ -0,0 +1,79 @@
|
||||
'use strict'
|
||||
var validate = require('aproba')
|
||||
var npm = require('../npm.js')
|
||||
|
||||
module.exports = function (differences, decomposed, next) {
|
||||
validate('AAF', arguments)
|
||||
differences.forEach((action) => {
|
||||
var cmd = action[0]
|
||||
var pkg = action[1]
|
||||
switch (cmd) {
|
||||
case 'add':
|
||||
addSteps(decomposed, pkg)
|
||||
break
|
||||
case 'update':
|
||||
updateSteps(decomposed, pkg)
|
||||
break
|
||||
case 'move':
|
||||
moveSteps(decomposed, pkg)
|
||||
break
|
||||
case 'remove':
|
||||
removeSteps(decomposed, pkg)
|
||||
break
|
||||
default:
|
||||
defaultSteps(decomposed, cmd, pkg)
|
||||
}
|
||||
})
|
||||
next()
|
||||
}
|
||||
|
||||
// Queue [action, pkg] onto `decomposed`, unless that exact pair is
// already queued -- keeps repeated expansions idempotent.
function addAction (decomposed, action, pkg) {
  const alreadyQueued = decomposed.some((entry) => entry[0] === action && entry[1] === pkg)
  if (alreadyQueued) return
  decomposed.push([action, pkg])
}
|
||||
|
||||
// Queue the full set of installer steps needed to add `pkg` to the tree.
function addSteps (decomposed, pkg) {
  if (pkg.fromBundle) {
    // make sure our source module exists to extract ourselves from
    // if we're installing our source module anyway, the duplication
    // of these steps will be elided by `addAction` automatically
    addAction(decomposed, 'fetch', pkg.fromBundle)
    addAction(decomposed, 'extract', pkg.fromBundle)
  }
  if (!pkg.fromBundle && !pkg.isLink) {
    addAction(decomposed, 'fetch', pkg)
    addAction(decomposed, 'extract', pkg)
  }
  // lifecycle/build steps are skipped for bundled deps unless the user
  // asked for bundles to be rebuilt
  if (!pkg.fromBundle || npm.config.get('rebuild-bundle')) {
    addAction(decomposed, 'preinstall', pkg)
    addAction(decomposed, 'build', pkg)
    addAction(decomposed, 'install', pkg)
    addAction(decomposed, 'postinstall', pkg)
  }
  if (!pkg.fromBundle || !pkg.isLink) {
    addAction(decomposed, 'finalize', pkg)
  }
  addAction(decomposed, 'refresh-package-json', pkg)
}
|
||||
|
||||
// An update is a removal of the old package followed by an add of the new.
function updateSteps (decomposed, pkg) {
  removeSteps(decomposed, pkg.oldPkg)
  addSteps(decomposed, pkg)
}

// Removal: unbuild first, then delete from disk.
function removeSteps (decomposed, pkg) {
  addAction(decomposed, 'unbuild', pkg)
  addAction(decomposed, 'remove', pkg)
}

// Relocating a module re-runs its build/install steps at the new path.
function moveSteps (decomposed, pkg) {
  for (const step of ['move', 'build', 'install', 'postinstall', 'refresh-package-json']) {
    addAction(decomposed, step, pkg)
  }
}

// Anything else maps 1:1 onto a single action.
function defaultSteps (decomposed, cmd, pkg) {
  addAction(decomposed, cmd, pkg)
}
|
816
website/node_modules/npm/lib/install/deps.js
generated
vendored
Normal file
816
website/node_modules/npm/lib/install/deps.js
generated
vendored
Normal file
@@ -0,0 +1,816 @@
|
||||
'use strict'
|
||||
|
||||
const BB = require('bluebird')
|
||||
|
||||
var fs = require('fs')
|
||||
var assert = require('assert')
|
||||
var path = require('path')
|
||||
var semver = require('semver')
|
||||
var asyncMap = require('slide').asyncMap
|
||||
var chain = require('slide').chain
|
||||
var iferr = require('iferr')
|
||||
var npa = require('npm-package-arg')
|
||||
var validate = require('aproba')
|
||||
var dezalgo = require('dezalgo')
|
||||
var fetchPackageMetadata = require('../fetch-package-metadata.js')
|
||||
var andAddParentToErrors = require('./and-add-parent-to-errors.js')
|
||||
var addBundled = require('../fetch-package-metadata.js').addBundled
|
||||
var readShrinkwrap = require('./read-shrinkwrap.js')
|
||||
var inflateShrinkwrap = require('./inflate-shrinkwrap.js')
|
||||
var inflateBundled = require('./inflate-bundled.js')
|
||||
var andFinishTracker = require('./and-finish-tracker.js')
|
||||
var npm = require('../npm.js')
|
||||
var flatNameFromTree = require('./flatten-tree.js').flatNameFromTree
|
||||
var createChild = require('./node.js').create
|
||||
var resetMetadata = require('./node.js').reset
|
||||
var isInstallable = require('./validate-args.js').isInstallable
|
||||
var packageId = require('../utils/package-id.js')
|
||||
var moduleName = require('../utils/module-name.js')
|
||||
var isDevDep = require('./is-dev-dep.js')
|
||||
var isProdDep = require('./is-prod-dep.js')
|
||||
var reportOptionalFailure = require('./report-optional-failure.js')
|
||||
var getSaveType = require('./save.js').getSaveType
|
||||
var unixFormatPath = require('../utils/unix-format-path.js')
|
||||
var isExtraneous = require('./is-extraneous.js')
|
||||
var isRegistry = require('../utils/is-registry.js')
|
||||
var hasModernMeta = require('./has-modern-meta.js')
|
||||
|
||||
// The export functions in this module mutate a dependency tree, adding
|
||||
// items to them.
|
||||
|
||||
var registryTypes = { range: true, version: true }
|
||||
|
||||
// Decide whether an already-resolved `child` node satisfies the
// `requested` specifier (as asked for by `requestor`).
function doesChildVersionMatch (child, requested, requestor) {
  // shrinkwrapped deps are trusted as-is unless the lock file carries its
  // own `requires` constraints for them
  if (child.fromShrinkwrap && !child.hasRequiresFromLock) return true
  // ranges of * ALWAYS count as a match, because when downloading we allow
  // prereleases to match * if there are ONLY prereleases
  if (requested.type === 'range' && requested.fetchSpec === '*') return true

  if (requested.type === 'directory') {
    // a directory dep only matches a symlink pointing at that directory
    if (!child.isLink) return false
    return path.relative(child.realpath, requested.fetchSpec) === ''
  }

  if (requested.type === 'git' && child.fromShrinkwrap) {
    const fromSw = child.package._from ? npa(child.package._from) : child.fromShrinkwrap
    fromSw.name = requested.name // we're only checking specifiers here
    if (fromSw.toString() === requested.toString()) return true
  }

  if (!registryTypes[requested.type]) {
    // non-registry specifier: compare against what the child was
    // originally requested/installed as
    var childReq = child.package._requested
    if (childReq) {
      if (childReq.rawSpec === requested.rawSpec) return true
      if (childReq.type === requested.type && childReq.saveSpec === requested.saveSpec) return true
    }
    // If _requested didn't exist OR if it didn't match then we'll try using
    // _from. We pass it through npa to normalize the specifier.
    // This can happen when installing from an `npm-shrinkwrap.json` where `_requested` will
    // be the tarball URL from `resolved` and thus can't match what's in the `package.json`.
    // In those cases _from, will be preserved and we can compare that to ensure that they
    // really came from the same sources.
    // You'll see this scenario happen with at least tags and git dependencies.
    // Some buggy clients will write spaces into the module name part of a _from.
    if (child.package._from) {
      var fromReq = npa.resolve(moduleName(child), child.package._from.replace(new RegExp('^\\s*' + moduleName(child) + '\\s*@'), ''))
      if (fromReq.rawSpec === requested.rawSpec) return true
      if (fromReq.type === requested.type && fromReq.saveSpec && fromReq.saveSpec === requested.saveSpec) return true
    }
    return false
  }
  try {
    // registry range/version specifiers: plain semver comparison
    return semver.satisfies(child.package.version, requested.fetchSpec, true)
  } catch (e) {
    return false
  }
}
|
||||
|
||||
// Resolve a name@spec pair relative to where `tree` lives on disk (or an
// explicit `where` override).
function childDependencySpecifier (tree, name, spec, where) {
  const base = where || packageRelativePath(tree)
  return npa.resolve(name, spec, base)
}
|
||||
|
||||
exports.computeMetadata = computeMetadata
// Recompute the logical metadata (location, requires/requiredBy links,
// missing-dependency maps) for `tree` and every reachable, non-removed
// child. `seen` guards against cycles.
function computeMetadata (tree, seen) {
  if (!seen) seen = new Set()
  if (!tree || seen.has(tree)) return
  seen.add(tree)
  if (tree.parent == null) {
    // no parent means this node is the root of the tree
    resetMetadata(tree)
    tree.isTop = true
  }
  tree.location = flatNameFromTree(tree)

  // try to link name@spec to a module already in the tree; returns true
  // when a satisfying module was found and linked
  function findChild (name, spec, kind) {
    try {
      var req = childDependencySpecifier(tree, name, spec)
    } catch (err) {
      // unparseable specifier -- treat as not found
      return
    }
    var child = findRequirement(tree, req.name, req)
    if (child) {
      resolveWithExistingModule(child, tree)
      return true
    }
  }

  const deps = tree.package.dependencies || {}
  const reqs = tree.swRequires || {}
  for (let name of Object.keys(deps)) {
    if (findChild(name, deps[name])) continue
    // fall back to the shrinkwrap's `requires` spec before declaring the
    // dependency missing
    if (name in reqs && findChild(name, reqs[name])) continue
    tree.missingDeps[name] = deps[name]
  }
  if (tree.isTop) {
    // devDependencies are only considered at the top level
    const devDeps = tree.package.devDependencies || {}
    for (let name of Object.keys(devDeps)) {
      if (findChild(name, devDeps[name])) continue
      tree.missingDevDeps[name] = devDeps[name]
    }
  }

  tree.children.filter((child) => !child.removed).forEach((child) => computeMetadata(child, seen))

  return tree
}
|
||||
|
||||
// Classify `child` relative to `tree`: returns {isDep, isProdDep,
// isDevDep} where isProdDep/isDevDep hold the resolved specifier when the
// child matches that kind of dependency, and false otherwise.
function isDep (tree, child) {
  var name = moduleName(child)
  var prodVer = isProdDep(tree, name)
  var devVer = isDevDep(tree, name)

  try {
    var prodSpec = childDependencySpecifier(tree, name, prodVer)
  } catch (err) {
    // an unparseable prod specifier still counts as "a dep", just untyped
    return {isDep: true, isProdDep: false, isDevDep: false}
  }
  var matches
  if (prodSpec) matches = doesChildVersionMatch(child, prodSpec, tree)
  if (matches) return {isDep: true, isProdDep: prodSpec, isDevDep: false}
  // identical dev and prod specs: the prod check above already failed, so
  // only a shrinkwrap entry keeps it a dep
  if (devVer === prodVer) return {isDep: child.fromShrinkwrap, isProdDep: false, isDevDep: false}
  try {
    var devSpec = childDependencySpecifier(tree, name, devVer)
    return {isDep: doesChildVersionMatch(child, devSpec, tree) || child.fromShrinkwrap, isProdDep: false, isDevDep: devSpec}
  } catch (err) {
    return {isDep: child.fromShrinkwrap, isProdDep: false, isDevDep: false}
  }
}
|
||||
|
||||
// Record the requires/requiredBy relationship between `tree` and `child`
// when `child` actually satisfies one of tree's dependencies; returns
// whether the link was made.
function addRequiredDep (tree, child) {
  const dep = isDep(tree, child)
  if (!dep.isDep) return false
  replaceModuleByPath(child, 'requiredBy', tree)
  replaceModuleByName(tree, 'requires', child)
  const name = moduleName(child)
  if (dep.isProdDep && tree.missingDeps) delete tree.missingDeps[name]
  if (dep.isDevDep && tree.missingDevDeps) delete tree.missingDevDeps[name]
  return true
}
|
||||
|
||||
exports.removeObsoleteDep = removeObsoleteDep
// Mark `child` as removed and detach it from both the physical
// (parent/children) and logical (requires/requiredBy) trees, cascading
// removal to any of its dependencies that become extraneous as a result.
function removeObsoleteDep (child, log) {
  if (child.removed) return
  child.removed = true
  if (log) {
    log.silly('removeObsoleteDep', 'removing ' + packageId(child) +
      ' from the tree as its been replaced by a newer version or is no longer required')
  }
  // remove from physical tree
  if (child.parent) {
    child.parent.children = child.parent.children.filter(function (pchild) { return pchild !== child })
  }
  // remove from logical tree
  var requires = child.requires || []
  requires.forEach(function (requirement) {
    requirement.requiredBy = requirement.requiredBy.filter(function (reqBy) { return reqBy !== child })
    // we don't just check requirement.requires because that doesn't account
    // for circular deps. isExtraneous does.
    if (isExtraneous(requirement)) removeObsoleteDep(requirement, log)
  })
}
|
||||
|
||||
// Base directory used to resolve relative (file:/directory) specifiers
// for dependencies of `tree`.
function packageRelativePath (tree) {
  if (!tree) return ''
  const requested = tree.package._requested || {}
  const isLocal = requested.type === 'directory' || requested.type === 'file'
  if (isLocal) return requested.fetchSpec
  const followLink = (tree.isLink || tree.isInLink) && !preserveSymlinks()
  return followLink ? tree.realpath : tree.path
}
|
||||
|
||||
// Look up the declared specifier for `name` in tree's dependencies,
// falling back to devDependencies; undefined when absent.
function matchingDep (tree, name) {
  if (!tree || !tree.package) return
  const pkg = tree.package
  if (pkg.dependencies && pkg.dependencies[name]) return pkg.dependencies[name]
  if (pkg.devDependencies && pkg.devDependencies[name]) return pkg.devDependencies[name]
}
|
||||
|
||||
// Fetch package metadata for each raw install argument. Bare names are
// disambiguated against the filesystem: if `arg/package.json` exists the
// argument is treated as a local path, otherwise as a registry specifier
// (preferring any version already declared in the tree).
exports.getAllMetadata = function (args, tree, where, next) {
  asyncMap(args, function (arg, done) {
    let spec
    try {
      spec = npa(arg)
    } catch (e) {
      return done(e)
    }
    if (spec.type !== 'file' && spec.type !== 'directory' && (spec.name == null || spec.rawSpec === '')) {
      return fs.stat(path.join(arg, 'package.json'), (err) => {
        if (err) {
          // no local package dir -- treat as a registry specifier
          var version = matchingDep(tree, spec.name)
          if (version) {
            try {
              return fetchPackageMetadata(npa.resolve(spec.name, version), where, done)
            } catch (e) {
              return done(e)
            }
          } else {
            return fetchPackageMetadata(spec, where, done)
          }
        } else {
          // a package.json exists there: treat the arg as a file: path
          try {
            return fetchPackageMetadata(npa('file:' + arg), where, done)
          } catch (e) {
            return done(e)
          }
        }
      })
    } else {
      return fetchPackageMetadata(spec, where, done)
    }
  }, next)
}
|
||||
|
||||
// Add a list of args to tree's top level dependencies
//
// Resolves each requested package into a new module node, records how it
// should be saved into package.json (and under which dependency field),
// and wires it into the tree's logical dependency graph.
exports.loadRequestedDeps = function (args, tree, saveToDependencies, log, next) {
  validate('AOOF', [args, tree, log, next])
  asyncMap(args, function (pkg, done) {
    var depLoaded = andAddParentToErrors(tree, done)
    resolveWithNewModule(pkg, tree, log.newGroup('loadRequestedDeps'), iferr(depLoaded, function (child, tracker) {
      validate('OO', arguments)
      if (npm.config.get('global')) {
        child.isGlobal = true
      }
      var childName = moduleName(child)
      child.saveSpec = computeVersionSpec(tree, child)
      child.userRequired = true
      child.save = getSaveType(tree, child)
      const types = ['dependencies', 'devDependencies', 'optionalDependencies']
      if (child.save) {
        tree.package[child.save][childName] = child.saveSpec
        // Astute readers might notice that this exact same code exists in
        // save.js under a different guise. That code is responsible for deps
        // being removed from the final written `package.json`. The removal in
        // this function is specifically to prevent "installed as both X and Y"
        // warnings when moving an existing dep between different dep fields.
        //
        // Or, try it by removing this loop, and do `npm i -P x && npm i -D x`
        for (let saveType of types) {
          if (child.save !== saveType) {
            delete tree.package[saveType][childName]
          }
        }
        // optional deps are also recorded as regular deps in package.json
        if (child.save === 'optionalDependencies') tree.package.dependencies[childName] = child.saveSpec
      }

      // For things the user asked to install, that aren't a dependency (or
      // won't be when we're done), flag it as "depending" on the user
      // themselves, so we don't remove it as a dep that no longer exists
      var childIsDep = addRequiredDep(tree, child)
      if (!childIsDep) child.userRequired = true
      depLoaded(null, child, tracker)
    }))
  }, andForEachChild(loadDeps, andFinishTracker(log, next)))
}
|
||||
|
||||
// True when `value` is neither null/undefined nor the empty string.
function isNotEmpty (value) {
  if (value == null) return false
  return value !== ''
}
|
||||
|
||||
exports.computeVersionSpec = computeVersionSpec
// Compute the spec string to record in package.json for `child`,
// eg "^1.2.3", "file:../foo", or a git/remote specifier.
function computeVersionSpec (tree, child) {
  validate('OO', arguments)
  var requested
  var childReq = child.package._requested
  if (child.isLink) {
    // links are always saved as file: specs anchored at the top of the tree
    requested = npa.resolve(child.package.name, 'file:' + child.realpath, getTop(tree).path)
  } else if (childReq && (isNotEmpty(childReq.saveSpec) || (isNotEmpty(childReq.rawSpec) && isNotEmpty(childReq.fetchSpec)))) {
    // trust the spec the install actually requested when it carries enough info
    requested = child.package._requested
  } else if (child.package._from) {
    requested = npa(child.package._from, tree.path)
  } else {
    // last resort: treat the installed version itself as the request
    requested = npa.resolve(child.package.name, child.package.version)
  }
  if (isRegistry(requested)) {
    var version = child.package.version
    var rangeDescriptor = ''
    // prefix with save-prefix (eg "^") unless --save-exact was given or the
    // version is invalid/too early for a range to be meaningful
    if (semver.valid(version, true) &&
      semver.gte(version, '0.1.0', true) &&
      !npm.config.get('save-exact')) {
      rangeDescriptor = npm.config.get('save-prefix')
    }
    return rangeDescriptor + version
  } else if (requested.type === 'directory' || requested.type === 'file') {
    // normalize to a forward-slash relative path from the tree's top
    return 'file:' + unixFormatPath(path.relative(getTop(tree).path, requested.fetchSpec))
  } else {
    return requested.saveSpec || requested.rawSpec
  }
}
|
||||
|
||||
// Build a predicate that matches tree nodes whose module name equals `name`.
function moduleNameMatches (name) {
  return (child) => moduleName(child) === name
}
|
||||
|
||||
// while this implementation does not require async calling, doing so
// gives this a consistent interface with loadDeps et al
// Remove the modules named in `args` from `tree`: deletes them from the
// relevant package.json section, unhooks them from the requires graph and
// flags them (and their subtree) for removal.
exports.removeDeps = function (args, tree, saveToDependencies, next) {
  validate('AOSF|AOZF', [args, tree, saveToDependencies, next])
  for (let pkg of args) {
    var pkgName = moduleName(pkg)
    var toRemove = tree.children.filter(moduleNameMatches(pkgName))
    // if the module isn't actually installed, synthesize a placeholder node
    var pkgToRemove = toRemove[0] || createChild({package: {name: pkgName}})
    var saveType = getSaveType(tree, pkg) || 'dependencies'
    if (tree.isTop && saveToDependencies) {
      pkgToRemove.save = saveType
    }
    if (tree.package[saveType][pkgName]) {
      delete tree.package[saveType][pkgName]
      // optional deps are mirrored in `dependencies`; remove both entries
      if (saveType === 'optionalDependencies' && tree.package.dependencies[pkgName]) {
        delete tree.package.dependencies[pkgName]
      }
    }
    replaceModuleByPath(tree, 'removedChildren', pkgToRemove)
    // detach the module from everything that required it, and vice versa
    for (let parent of pkgToRemove.requiredBy) {
      parent.requires = parent.requires.filter((child) => child !== pkgToRemove)
    }
    pkgToRemove.requiredBy = pkgToRemove.requiredBy.filter((parent) => parent !== tree)
    flagAsRemoving(pkgToRemove)
  }
  next()
}
|
||||
|
||||
// Mark `toRemove` and everything it transitively requires with
// `removing = true`. The `seen` set guards against cycles in the
// requires graph.
function flagAsRemoving (toRemove, seen) {
  if (seen == null) seen = new Set()
  if (seen.has(toRemove)) return
  seen.add(toRemove)
  toRemove.removing = true
  for (const required of toRemove.requires) {
    flagAsRemoving(required, seen)
  }
}
|
||||
|
||||
// Flag any children of `tree` named in `args` as obsolete so they get
// pruned. Unlike removeDeps, this does not touch package.json sections.
exports.removeExtraneous = function (args, tree, next) {
  for (let pkg of args) {
    var pkgName = moduleName(pkg)
    var toRemove = tree.children.filter(moduleNameMatches(pkgName))
    if (toRemove.length) {
      removeObsoleteDep(toRemove[0])
    }
  }
  next()
}
|
||||
|
||||
// Build an asyncMap completion callback that runs `load` over each
// resolved child (in a stable, name-sorted order) before calling `next`.
function andForEachChild (load, next) {
  validate('F', [next])
  // guarantee next is called asynchronously even if we short-circuit
  next = dezalgo(next)
  return function (er, children, logs) {
    // when children is empty, logs won't be passed in at all (asyncMap is weird)
    // so shortcircuit before arg validation
    if (!er && (!children || children.length === 0)) return next()
    validate('EAA', arguments)
    if (er) return next(er)
    assert(children.length === logs.length)
    var cmds = []
    for (var ii = 0; ii < children.length; ++ii) {
      cmds.push([load, children[ii], logs[ii]])
    }
    // sort by module name so load order (and thus output) is deterministic
    var sortedCmds = cmds.sort(function installOrder (aa, bb) {
      return moduleName(aa[1]).localeCompare(moduleName(bb[1]))
    })
    chain(sortedCmds, next)
  }
}
|
||||
|
||||
// Would a failure of dependency `name` of `tree` be non-fatal?
// True when it's an optional dep, or a dev/prod dep that the current
// config (--only / --production / --dev / --also) says we aren't installing.
function isDepOptional (tree, name, pkg) {
  if (pkg.package && pkg.package._optional) return true
  const optDeps = tree.package.optionalDependencies
  if (optDeps && optDeps[name] != null) return true

  const devDeps = tree.package.devDependencies
  if (devDeps && devDeps[name] != null) {
    // a dev dep only counts as "optional" when dev deps are being skipped
    const includeDev = npm.config.get('dev') ||
      (!/^prod(uction)?$/.test(npm.config.get('only')) && !npm.config.get('production')) ||
      /^dev(elopment)?$/.test(npm.config.get('only')) ||
      /^dev(elopment)?$/.test(npm.config.get('also'))
    return !includeDev
  }
  const prodDeps = tree.package.dependencies
  if (prodDeps && prodDeps[name] != null) {
    // likewise, a prod dep is only skippable under --only=dev
    const includeProd = !/^dev(elopment)?$/.test(npm.config.get('only'))
    return !includeProd
  }
  return false
}
|
||||
|
||||
exports.failedDependency = failedDependency
// Record that `tree` failed and decide whether the failure is fatal.
// Returns false when the dependency is optional from every path requiring
// it; returns true when the top level or a user-required module is hit.
function failedDependency (tree, name, pkg) {
  if (name) {
    if (isDepOptional(tree, name, pkg || {})) {
      return false
    }
  }

  tree.failed = true

  if (tree.isTop) return true

  if (tree.userRequired) return true

  if (!tree.requiredBy) return false

  // fatal iff failing this node is fatal for anything that requires it
  let anyFailed = false
  for (var ii = 0; ii < tree.requiredBy.length; ++ii) {
    var requireParent = tree.requiredBy[ii]
    if (failedDependency(requireParent, moduleName(tree), tree)) {
      anyFailed = true
    }
  }
  return anyFailed
}
|
||||
|
||||
// Wrap a dependency-load callback so failures of optional deps are
// reported but swallowed, while failures of required deps propagate.
function andHandleOptionalErrors (log, tree, name, done) {
  validate('OOSF', arguments)
  return function (er, child, childLog) {
    if (!er) validate('OO', [child, childLog])
    if (!er) return done(er, child, childLog)
    // failedDependency also marks the tree as failed as a side effect
    var isFatal = failedDependency(tree, name)
    if (er && !isFatal) {
      // best-effort dep: record the failure and carry on without it
      reportOptionalFailure(tree, name, er)
      return done()
    } else {
      return done(er, child, childLog)
    }
  }
}
|
||||
|
||||
exports.prefetchDeps = prefetchDeps
// Warm the metadata cache: recursively fetch registry metadata for `deps`
// (and their deps) in parallel without mutating the tree. Individual fetch
// failures are deliberately ignored — this is purely an optimization pass.
function prefetchDeps (tree, deps, log, next) {
  validate('OOOF', arguments)
  var skipOptional = !npm.config.get('optional')
  var seen = new Set()
  const finished = andFinishTracker(log, next)
  const fpm = BB.promisify(fetchPackageMetadata)
  resolveBranchDeps(tree.package, deps).then(
    () => finished(), finished
  )

  function resolveBranchDeps (pkg, deps) {
    return BB.resolve(null).then(() => {
      // only registry specs we haven't fetched yet and that aren't already
      // satisfied somewhere in the tree
      var allDependencies = Object.keys(deps).map((dep) => {
        return npa.resolve(dep, deps[dep])
      }).filter((dep) => {
        return isRegistry(dep) &&
          !seen.has(dep.toString()) &&
          !findRequirement(tree, dep.name, dep)
      })
      if (skipOptional) {
        var optDeps = pkg.optionalDependencies || {}
        allDependencies = allDependencies.filter((dep) => !optDeps[dep.name])
      }
      return BB.map(allDependencies, (dep) => {
        seen.add(dep.toString())
        return fpm(dep, '', {tracker: log.newItem('fetchMetadata')}).then(
          (pkg) => {
            // recurse into the fetched package's own dependencies
            return pkg && pkg.dependencies && resolveBranchDeps(pkg, pkg.dependencies)
          },
          () => null // fetch errors are non-fatal for a prefetch
        )
      })
    })
  }
}
|
||||
|
||||
// Load any missing dependencies in the given tree
exports.loadDeps = loadDeps
function loadDeps (tree, log, next) {
  validate('OOF', arguments)
  // nothing to do for already-loaded, removed, or failed-parent subtrees
  if (tree.loaded || (tree.parent && tree.parent.failed) || tree.removed) return andFinishTracker.now(log, next)
  if (tree.parent) tree.loaded = true
  if (!tree.package.dependencies) tree.package.dependencies = {}
  asyncMap(Object.keys(tree.package.dependencies), function (dep, done) {
    var version = tree.package.dependencies[dep]
    // optional-dep failures are swallowed by andHandleOptionalErrors
    addDependency(dep, version, tree, log.newGroup('loadDep:' + dep), andHandleOptionalErrors(log, tree, dep, done))
  }, andForEachChild(loadDeps, andFinishTracker(log, next)))
}
|
||||
|
||||
// Load development dependencies into the given tree
exports.loadDevDeps = function (tree, log, next) {
  validate('OOF', arguments)
  if (!tree.package.devDependencies) return andFinishTracker.now(log, next)
  asyncMap(Object.keys(tree.package.devDependencies), function (dep, done) {
    // things defined as both dev dependencies and regular dependencies are
    // only loaded once — by loadDeps, as regular deps — so skip them here
    if (tree.package.dependencies[dep]) return done()

    var logGroup = log.newGroup('loadDevDep:' + dep)
    addDependency(dep, tree.package.devDependencies[dep], tree, logGroup, andHandleOptionalErrors(log, tree, dep, done))
  }, andForEachChild(loadDeps, andFinishTracker(log, next)))
}
|
||||
|
||||
// Resolve every not-yet-loaded child already present in the tree (eg found
// on disk) without fetching anything. Depth-first and cycle-safe.
var loadExtraneous = exports.loadExtraneous = function (tree, log, next) {
  var seen = new Set()

  function loadExtraneous (tree) {
    if (seen.has(tree)) return
    seen.add(tree)
    for (var child of tree.children) {
      if (child.loaded) continue
      resolveWithExistingModule(child, tree)
      loadExtraneous(child)
    }
  }
  loadExtraneous(tree)
  log.finish()
  next()
}
|
||||
|
||||
// Like loadExtraneous, but also loads the dependencies of each newly
// resolved child via loadDeps.
exports.loadExtraneous.andResolveDeps = function (tree, log, next) {
  validate('OOF', arguments)
  // For canonicalized trees (eg from shrinkwrap) we don't want to bother
  // resolving the dependencies of extraneous deps.
  if (tree.loaded) return loadExtraneous(tree, log, next)
  asyncMap(tree.children.filter(function (child) { return !child.loaded }), function (child, done) {
    resolveWithExistingModule(child, tree)
    done(null, child, log)
  }, andForEachChild(loadDeps, andFinishTracker(log, next)))
}
|
||||
|
||||
// Resolve a single dependency `name@versionSpec` of `tree`: reuse an
// existing module from the tree when one satisfies the spec (also checking
// any shrinkwrap-recorded spec), otherwise fetch metadata and create a new
// module. Calls done(err, child, log).
function addDependency (name, versionSpec, tree, log, done) {
  validate('SSOOF', arguments)
  var next = andAddParentToErrors(tree, done)
  try {
    var req = childDependencySpecifier(tree, name, versionSpec)
    if (tree.swRequires && tree.swRequires[name]) {
      // also consider the spec recorded in the shrinkwrap, if any
      var swReq = childDependencySpecifier(tree, name, tree.swRequires[name], tree.package._where)
    }
  } catch (err) {
    // eg an unparseable specifier
    return done(err)
  }
  var child = findRequirement(tree, name, req)
  if (!child && swReq) child = findRequirement(tree, name, swReq)
  if (hasModernMeta(child)) {
    // reuse the already-installed module
    resolveWithExistingModule(child, tree)
    if (child.package._shrinkwrap === undefined) {
      readShrinkwrap.andInflate(child, function (er) { next(er, child, log) })
    } else {
      next(null, child, log)
    }
  } else {
    if (child) {
      // a matching module exists but its metadata is too old to trust;
      // replace it, pinning registry specs to the existing version
      if (req.registry) {
        req = childDependencySpecifier(tree, name, child.package.version)
      }
      if (child.fromBundle) reportBundleOverride(child, log)
      removeObsoleteDep(child, log)
    }
    fetchPackageMetadata(req, packageRelativePath(tree), {tracker: log.newItem('fetchMetadata')}, iferr(next, function (pkg) {
      resolveWithNewModule(pkg, tree, log, next)
    }))
  }
}
|
||||
|
||||
// Walk parent links to the topmost node of the tree containing `pkg`.
// Cycle-safe: stops as soon as a parent has already been visited.
function getTop (pkg) {
  const visited = new Set()
  let node = pkg
  for (;;) {
    const parent = node.parent
    if (!parent || visited.has(parent)) return node
    node = parent
    visited.add(node)
  }
}
|
||||
|
||||
// Warn that a bundled dependency was replaced with a non-bundled version.
// Only one EBUNDLEOVERRIDE warning is pushed per top-level tree, but the
// verbose log line is emitted for every replacement.
function reportBundleOverride (child, log) {
  const code = 'EBUNDLEOVERRIDE'
  const top = getTop(child.fromBundle)
  const bundlerId = packageId(child.fromBundle)
  if (!top.warnings.some((w) => {
    return w.code === code
  })) {
    const err = new Error(`${bundlerId} had bundled packages that do not match the required version(s). They have been replaced with non-bundled versions.`)
    err.code = code
    top.warnings.push(err)
  }
  if (log) log.verbose('bundle', `${code}: Replacing ${bundlerId}'s bundled version of ${moduleName(child)} with ${packageId(child)}.`)
}
|
||||
|
||||
// Wire an already-installed module `child` up as a dependency of `tree`.
function resolveWithExistingModule (child, tree) {
  validate('OO', arguments)
  addRequiredDep(tree, child)
  // if the dep actually lives elsewhere in the tree, record it as a
  // phantom child of everything between here and its real parent
  if (tree.parent && child.parent !== tree) updatePhantomChildren(tree.parent, child)
}
|
||||
|
||||
// Record `child` as a "phantom" child of every node from `current` up to
// (but not including) child's actual parent. earliestInstallable refuses
// to hoist another copy of the same module into a node that has it as a
// phantom child, which would shadow this one.
var updatePhantomChildren = exports.updatePhantomChildren = function (current, child) {
  validate('OO', arguments)
  while (current && current !== child.parent) {
    if (!current.phantomChildren) current.phantomChildren = {}
    current.phantomChildren[moduleName(child)] = child
    current = current.parent
  }
}
|
||||
|
||||
exports._replaceModuleByPath = replaceModuleByPath
// Replace (or insert) `child` in obj[key], matching an existing entry by
// its filesystem path.
function replaceModuleByPath (obj, key, child) {
  const samePath = (replacing, candidate) => replacing.path === candidate.path
  return replaceModule(obj, key, child, samePath)
}
|
||||
|
||||
exports._replaceModuleByName = replaceModuleByName
// Replace (or insert) `child` in obj[key], matching an existing entry by
// its module name.
function replaceModuleByName (obj, key, child) {
  const childName = moduleName(child)
  const sameName = (replacing) => moduleName(replacing) === childName
  return replaceModule(obj, key, child, sameName)
}
|
||||
|
||||
// Replace the first entry of obj[key] that `matchBy` pairs with `child`,
// returning the replaced entry. When nothing matches, `child` is appended
// and undefined is returned.
function replaceModule (obj, key, child, matchBy) {
  validate('OSOF', arguments)
  if (!obj[key]) obj[key] = []
  // we replace children with a new array object instead of mutating it
  // because mutating it results in weird failure states.
  // I would very much like to know _why_ this is. =/
  var children = [].concat(obj[key])
  for (var replaceAt = 0; replaceAt < children.length; ++replaceAt) {
    if (matchBy(children[replaceAt], child)) break
  }
  // if no match was found, replaceAt === children.length, so this splice
  // appends `child` and `replacing` comes back empty
  var replacing = children.splice(replaceAt, 1, child)
  obj[key] = children
  return replacing[0]
}
|
||||
|
||||
// Create a tree node for freshly fetched metadata `pkg`, hang it at the
// highest installable spot in the tree, and inflate any bundled or
// shrinkwrapped children. Calls next(err, child, log).
function resolveWithNewModule (pkg, tree, log, next) {
  validate('OOOF', arguments)

  log.silly('resolveWithNewModule', packageId(pkg), 'checking installable status')
  return isInstallable(pkg, (err) => {
    let installable = !err
    addBundled(pkg, (bundleErr) => {
      // place the module as high in the tree as conflicts allow
      var parent = earliestInstallable(tree, tree, pkg, log) || tree
      var isLink = pkg._requested.type === 'directory'
      var child = createChild({
        package: pkg,
        parent: parent,
        path: path.join(parent.isLink ? parent.realpath : parent.path, 'node_modules', pkg.name),
        realpath: isLink ? pkg._requested.fetchSpec : path.join(parent.realpath, 'node_modules', pkg.name),
        children: pkg._bundled || [],
        isLink: isLink,
        isInLink: parent.isLink,
        knownInstallable: installable
      })
      // install/bundle problems don't abort resolution; they mark the node
      if (!installable || bundleErr) child.failed = true
      delete pkg._bundled
      var hasBundled = child.children.length

      // evict any existing same-named sibling in favor of this new node
      var replaced = replaceModuleByName(parent, 'children', child)
      if (replaced) {
        if (replaced.fromBundle) reportBundleOverride(replaced, log)
        removeObsoleteDep(replaced)
      }
      addRequiredDep(tree, child)
      child.location = flatNameFromTree(child)

      if (tree.parent && parent !== tree) updatePhantomChildren(tree.parent, child)

      if (hasBundled) {
        inflateBundled(child, child, child.children)
      }

      if (pkg._shrinkwrap && pkg._shrinkwrap.dependencies) {
        return inflateShrinkwrap(child, pkg._shrinkwrap, (swErr) => {
          if (swErr) child.failed = true
          next(err || bundleErr || swErr, child, log)
        })
      }
      next(err || bundleErr, child, log)
    })
  })
}
|
||||
|
||||
// Check this node's peerDependencies against what's resolved in the tree,
// calling onInvalid(tree, pkgname, version) for each unmet peer.
var validatePeerDeps = exports.validatePeerDeps = function (tree, onInvalid) {
  if (!tree.package.peerDependencies) return
  Object.keys(tree.package.peerDependencies).forEach(function (pkgname) {
    var version = tree.package.peerDependencies[pkgname]
    try {
      var spec = npa.resolve(pkgname, version)
    } catch (e) {}
    // an unparseable spec leaves `spec` undefined and is reported as unmet
    var match = spec && findRequirement(tree.parent || tree, pkgname, spec)
    if (!match) onInvalid(tree, pkgname, version)
  })
}
|
||||
|
||||
// Public entry point: validate peer dependencies across the whole tree.
exports.validateAllPeerDeps = function (tree, onInvalid) {
  validateAllPeerDeps(tree, onInvalid, new Set())
}
|
||||
|
||||
// Walk the tree (cycle-safe via `seen`) validating each node's peer deps.
function validateAllPeerDeps (tree, onInvalid, seen) {
  validate('OFO', arguments)
  if (seen.has(tree)) return
  seen.add(tree)
  validatePeerDeps(tree, onInvalid)
  tree.children.forEach(function (child) { validateAllPeerDeps(child, onInvalid, seen) })
}
|
||||
|
||||
// Determine if a module requirement is already met by the tree at or above
// our current location in the tree.
// Returns the matching node, or null if a new copy must be installed.
var findRequirement = exports.findRequirement = function (tree, name, requested, requestor) {
  validate('OSO', [tree, name, requested])
  if (!requestor) requestor = tree
  var nameMatch = function (child) {
    return moduleName(child) === name && child.parent && !child.removed
  }
  var versionMatch = function (child) {
    return doesChildVersionMatch(child, requested, requestor)
  }
  if (nameMatch(tree)) {
    // this *is* the module, but it doesn't match the version, so a
    // new copy will have to be installed
    return versionMatch(tree) ? tree : null
  }

  var matches = tree.children.filter(nameMatch)
  if (matches.length) {
    matches = matches.filter(versionMatch)
    // the module exists as a dependent, but the version doesn't match, so
    // a new copy will have to be installed above here
    if (matches.length) return matches[0]
    return null
  }
  if (tree.isTop) return null
  // without NODE_PRESERVE_SYMLINKS, don't search past a symlink boundary
  if (!preserveSymlinks() && /^[.][.][\\/]/.test(path.relative(tree.parent.realpath, tree.realpath))) return null
  return findRequirement(tree.parent, name, requested, requestor)
}
|
||||
|
||||
// Mirror Node's own interpretation of the NODE_PRESERVE_SYMLINKS
// environment variable: unset, empty, "false", "no" and "0" all mean off.
function preserveSymlinks () {
  if (!('NODE_PRESERVE_SYMLINKS' in process.env)) return false
  const value = process.env.NODE_PRESERVE_SYMLINKS
  const disabled = new Set(['', 'false', 'no', '0'])
  return value != null && !disabled.has(value)
}
|
||||
|
||||
// Find the highest level in the tree that we can install this module in.
// If the module isn't installed above us yet, that'd be the very top.
// If it is, then it's the level below where its installed.
// Returns null when this level (and everything below it on the walk) is
// blocked; the caller falls back to installing next to the requirer.
var earliestInstallable = exports.earliestInstallable = function (requiredBy, tree, pkg, log) {
  validate('OOOO', arguments)

  function undeletedModuleMatches (child) {
    return !child.removed && moduleName(child) === pkg.name
  }
  const undeletedMatches = tree.children.filter(undeletedModuleMatches)
  if (undeletedMatches.length) {
    // if there's a conflict with another child AT THE SAME level then we're replacing it, so
    // mark it as removed and continue with resolution normally.
    if (tree === requiredBy) {
      undeletedMatches.forEach((pkg) => {
        if (pkg.fromBundle) reportBundleOverride(pkg, log)
        removeObsoleteDep(pkg, log)
      })
    } else {
      return null
    }
  }

  // If any of the children of this tree have conflicting
  // binaries then we need to decline to install this package here.
  var binaryMatches = pkg.bin && tree.children.some(function (child) {
    if (child.removed || !child.package.bin) return false
    return Object.keys(child.package.bin).some(function (bin) {
      return pkg.bin[bin]
    })
  })

  if (binaryMatches) return null

  // if this tree location requested the same module then we KNOW it
  // isn't compatible because if it were findRequirement would have
  // found that version.
  var deps = tree.package.dependencies || {}
  if (!tree.removed && requiredBy !== tree && deps[pkg.name]) {
    return null
  }

  // at the top, an incompatible devDependency on the same name blocks us
  var devDeps = tree.package.devDependencies || {}
  if (tree.isTop && devDeps[pkg.name]) {
    var requested = childDependencySpecifier(tree, pkg.name, devDeps[pkg.name])
    if (!doesChildVersionMatch({package: pkg}, requested, tree)) {
      return null
    }
  }

  // a phantom child means some subtree depends on a different copy of this
  // module that installing here would shadow
  if (tree.phantomChildren && tree.phantomChildren[pkg.name]) return null

  if (tree.isTop) return tree
  if (tree.isGlobal) return tree

  // global-style / legacy-bundling keep deps as close to the requirer as possible
  if (npm.config.get('global-style') && tree.parent.isTop) return tree
  if (npm.config.get('legacy-bundling')) return tree

  // without NODE_PRESERVE_SYMLINKS, don't hoist past a symlink boundary
  if (!preserveSymlinks() && /^[.][.][\\/]/.test(path.relative(tree.parent.realpath, tree.realpath))) return tree

  return (earliestInstallable(requiredBy, tree.parent, pkg, log) || tree)
}
|
260
website/node_modules/npm/lib/install/diff-trees.js
generated
vendored
Normal file
260
website/node_modules/npm/lib/install/diff-trees.js
generated
vendored
Normal file
@@ -0,0 +1,260 @@
|
||||
'use strict'
|
||||
var npm = require('../npm.js')
|
||||
var validate = require('aproba')
|
||||
var npa = require('npm-package-arg')
|
||||
var flattenTree = require('./flatten-tree.js')
|
||||
var isOnlyDev = require('./is-only-dev.js')
|
||||
var log = require('npmlog')
|
||||
var path = require('path')
|
||||
var ssri = require('ssri')
|
||||
var moduleName = require('../utils/module-name.js')
|
||||
var isOnlyOptional = require('./is-only-optional.js')
|
||||
|
||||
// we don't use get-requested because we're operating on files on disk, and
// we don't want to extropolate from what _should_ be there.
// Recover the spec that was actually requested for this package, from
// whichever metadata field survives on disk.
function pkgRequested (pkg) {
  if (pkg._requested) return pkg._requested
  const fromResolved = pkg._resolved && npa(pkg._resolved)
  if (fromResolved) return fromResolved
  return pkg._from && npa(pkg._from)
}
|
||||
|
||||
// True when the spec resolves outside the registry: git, local file or
// directory, or a remote tarball URL.
function nonRegistrySource (requested) {
  return Boolean(
    fromGit(requested) ||
    fromLocal(requested) ||
    fromRemote(requested)
  )
}
|
||||
|
||||
// True if the requested spec points at a remote tarball URL.
// Returns an explicit boolean on every path, consistent with the sibling
// fromLocal/fromGit helpers (previously the non-remote path fell through
// and returned undefined — falsy either way, so this is backward-compatible).
function fromRemote (requested) {
  if (requested.type === 'remote') return true
  return false
}
|
||||
|
||||
// True if the requested spec refers to something on the local filesystem.
// "local" is the legacy npm@3 spec type that meant "file".
function fromLocal (requested) {
  return ['file', 'directory', 'local'].includes(requested.type)
}
|
||||
|
||||
// True if the requested spec is a git repository (hosted shortcut like
// "user/repo", or an explicit git URL).
function fromGit (requested) {
  const type = requested.type
  return type === 'hosted' || type === 'git'
}
|
||||
|
||||
// Extract subresource-integrity information for a package's tarball from
// whichever metadata field carries it. Returns undefined when none is
// available or parsing fails — deliberately best-effort.
function pkgIntegrity (pkg) {
  try {
    // dist is provided by the registry
    var sri = (pkg.dist && pkg.dist.integrity) ||
      // _integrity is provided by pacote
      pkg._integrity ||
      // _shasum is legacy
      (pkg._shasum && ssri.fromHex(pkg._shasum, 'sha1').toString())
    if (!sri) return
    var integrity = ssri.parse(sri)
    if (Object.keys(integrity).length === 0) return
    return integrity
  } catch (ex) {
    // malformed integrity data — treat as "unknown" rather than failing
  }
}
|
||||
|
||||
// Compare two parsed integrity objects: true when any digest of a shared
// algorithm matches. Previously the innermost loop returned the comparison
// of only the FIRST digest pair of the first shared algorithm, so a
// matching digest later in either list was never examined.
function sriMatch (aa, bb) {
  if (!aa || !bb) return false
  for (const algo of Object.keys(aa)) {
    if (!bb[algo]) continue
    for (const aaHash of aa[algo]) {
      for (const bbHash of bb[algo]) {
        if (aaHash.digest === bbHash.digest) return true
      }
    }
  }
  return false
}
|
||||
|
||||
// Decide whether two tree nodes that occupy the same path represent the
// same physical package, so no install action is needed between them.
function pkgAreEquiv (aa, bb) {
  // coming in we know they share a path…

  // if one is inside a link and the other is not, then they are not equivalent
  // this happens when we're replacing a linked dep with a non-linked version
  if (aa.isInLink !== bb.isInLink) return false
  // if they share package metadata _identity_, they're the same thing
  if (aa.package === bb.package) return true
  // if they share integrity information, they're the same thing
  var aaIntegrity = pkgIntegrity(aa.package)
  var bbIntegrity = pkgIntegrity(bb.package)
  if (aaIntegrity || bbIntegrity) return sriMatch(aaIntegrity, bbIntegrity)

  // if they're links and they share the same target, they're the same thing
  if (aa.isLink && bb.isLink) return aa.realpath === bb.realpath

  // if we can't determine both their sources then we have no way to know
  // if they're the same thing, so we have to assume they aren't
  var aaReq = pkgRequested(aa.package)
  var bbReq = pkgRequested(bb.package)
  if (!aaReq || !bbReq) return false

  if (fromGit(aaReq) && fromGit(bbReq)) {
    // if both are git and share a _resolved specifier (one with the
    // comittish replaced by a commit hash) then they're the same
    return aa.package._resolved && bb.package._resolved &&
      aa.package._resolved === bb.package._resolved
  }

  // we have to give up trying to find matches for non-registry sources at this point…
  if (nonRegistrySource(aaReq) || nonRegistrySource(bbReq)) return false

  // finally, if they ARE a registry source then version matching counts
  return aa.package.version === bb.package.version
}
|
||||
|
||||
// Append every element of `bb` onto `aa`, mutating `aa` in place.
function pushAll (aa, bb) {
  for (const item of bb) {
    aa.push(item)
  }
}
|
||||
|
||||
// Compute the install actions needed to turn oldTree into newTree and
// append them, dependency-sorted, onto `differences`.
module.exports = function (oldTree, newTree, differences, log, next) {
  validate('OOAOF', arguments)
  pushAll(differences, sortActions(diffTrees(oldTree, newTree)))
  log.finish()
  next()
}
|
||||
|
||||
// True for ordinary transitive-dependency nodes: not the top of the tree,
// not user-requested, and not a pre-existing ("extraneous") module.
function isNotTopOrExtraneous (node) {
  if (node.isTop) return false
  if (node.userRequired) return false
  if (node.existing) return false
  return true
}
|
||||
|
||||
// Order install actions so that each module's action comes before the
// actions of the modules that require it, with top-level deps placed last
// for install consistency. Input actions are [name, module] pairs keyed by
// the module's flat location.
var sortActions = module.exports.sortActions = function (differences) {
  var actions = {}
  differences.forEach(function (action) {
    var child = action[1]
    actions[child.location] = action
  })

  var sorted = []
  var added = {}

  var sortedlocs = Object.keys(actions).sort(sortByLocation)

  // We're going to sort the actions taken on top level dependencies first, before
  // considering the order of transitive deps. Because we're building our list
  // from the bottom up, this means we will return a list with top level deps LAST.
  // This is important in terms of keeping installations as consistent as possible
  // as folks add new dependencies.
  var toplocs = sortedlocs.filter(function (location) {
    var mod = actions[location][1]
    if (!mod.requiredBy) return true
    // If this module is required by any non-top level module
    // or by any extraneous module, eg user requested or existing
    // then we don't want to give this priority sorting.
    return !mod.requiredBy.some(isNotTopOrExtraneous)
  })

  toplocs.concat(sortedlocs).forEach(function (location) {
    sortByDeps(actions[location])
  })

  // reverse-lexicographic location order (note the swapped operands)
  function sortByLocation (aa, bb) {
    return bb.localeCompare(aa)
  }
  function sortModuleByLocation (aa, bb) {
    return sortByLocation(aa && aa.location, bb && bb.location)
  }
  // depth-first insert: each requirer is processed first and then this
  // action is unshifted, so the action lands ahead of its dependents
  function sortByDeps (action) {
    var mod = action[1]
    if (added[mod.location]) return
    added[mod.location] = action
    if (!mod.requiredBy) mod.requiredBy = []
    mod.requiredBy.sort(sortModuleByLocation).forEach(function (mod) {
      if (actions[mod.location]) sortByDeps(actions[mod.location])
    })
    sorted.unshift(action)
  }

  // safety net, anything excluded above gets tacked on the end
  differences.forEach((_) => {
    if (sorted.indexOf(_) === -1) sorted.push(_)
  })

  return sorted
}
|
||||
|
||||
// Record an install action (eg 'add', 'remove', 'move') for `pkg`.
function setAction (differences, action, pkg) {
  const entry = [action, pkg]
  differences.push(entry)
}
|
||||
|
||||
// Diff two module trees into a list of [action, pkg] pairs: 'update' for
// same-location changes, 'add'/'remove' for new/gone modules, and 'move'
// when an equivalent removed module can simply be relocated on disk.
var diffTrees = module.exports._diffTrees = function (oldTree, newTree) {
  validate('OO', arguments)
  var differences = []
  var flatOldTree = flattenTree(oldTree)
  var flatNewTree = flattenTree(newTree)
  var toRemove = {}
  var toRemoveByName = {}

  // Build our tentative remove list. We don't add remove actions yet
  // because we might resuse them as part of a move.
  Object.keys(flatOldTree).forEach(function (flatname) {
    if (flatname === '/') return
    if (flatNewTree[flatname]) return
    var pkg = flatOldTree[flatname]
    // never remove things reached through a link that live outside the tree
    if (pkg.isInLink && /^[.][.][/\\]/.test(path.relative(newTree.realpath, pkg.realpath))) return

    toRemove[flatname] = pkg
    var name = moduleName(pkg)
    if (!toRemoveByName[name]) toRemoveByName[name] = []
    toRemoveByName[name].push({flatname: flatname, pkg: pkg})
  })

  // generate our add/update/move actions
  Object.keys(flatNewTree).forEach(function (flatname) {
    if (flatname === '/') return
    var pkg = flatNewTree[flatname]
    var oldPkg = pkg.oldPkg = flatOldTree[flatname]
    if (oldPkg) {
      // if the versions are equivalent then we don't need to update… unless
      // the user explicitly asked us to.
      if (!pkg.userRequired && pkgAreEquiv(oldPkg, pkg)) return
      setAction(differences, 'update', pkg)
    } else {
      var name = moduleName(pkg)
      // find any packages we're removing that share the same name and are equivalent
      var removing = (toRemoveByName[name] || []).filter((rm) => pkgAreEquiv(rm.pkg, pkg))
      var bundlesOrFromBundle = pkg.fromBundle || pkg.package.bundleDependencies
      // if we have any removes that match AND we're not working with a bundle then upgrade to a move
      if (removing.length && !bundlesOrFromBundle) {
        var toMv = removing.shift()
        toRemoveByName[name] = toRemoveByName[name].filter((rm) => rm !== toMv)
        pkg.fromPath = toMv.pkg.path
        setAction(differences, 'move', pkg)
        delete toRemove[toMv.flatname]
        // we don't generate add actions for things found in links (which already exist on disk)
      } else if (!pkg.isInLink || !(pkg.fromBundle && pkg.fromBundle.isLink)) {
        setAction(differences, 'add', pkg)
      }
    }
  })

  // finally generate our remove actions from any not consumed by moves
  Object
    .keys(toRemove)
    .map((flatname) => toRemove[flatname])
    .forEach((pkg) => setAction(differences, 'remove', pkg))

  return filterActions(differences)
}
|
||||
|
||||
// Drop actions for packages excluded by the current config
// (--only, --production, --dev, --also, --no-optional).
function filterActions (differences) {
  const includeOpt = npm.config.get('optional')
  const includeDev = npm.config.get('dev') ||
    (!/^prod(uction)?$/.test(npm.config.get('only')) && !npm.config.get('production')) ||
    /^dev(elopment)?$/.test(npm.config.get('only')) ||
    /^dev(elopment)?$/.test(npm.config.get('also'))
  const includeProd = !/^dev(elopment)?$/.test(npm.config.get('only'))
  // fast path: nothing is being excluded
  if (includeProd && includeDev && includeOpt) return differences

  log.silly('diff-trees', 'filtering actions:', 'includeDev', includeDev, 'includeProd', includeProd, 'includeOpt', includeOpt)
  return differences.filter((diff) => {
    const pkg = diff[1]
    const pkgIsOnlyDev = isOnlyDev(pkg)
    const pkgIsOnlyOpt = isOnlyOptional(pkg)
    if (!includeProd && pkgIsOnlyDev) return true
    if (includeDev && pkgIsOnlyDev) return true
    if (includeProd && !pkgIsOnlyDev && (includeOpt || !pkgIsOnlyOpt)) return true
    return false
  })
}
|
27
website/node_modules/npm/lib/install/exists.js
generated
vendored
Normal file
27
website/node_modules/npm/lib/install/exists.js
generated
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
'use strict'
|
||||
var fs = require('fs')
|
||||
var inflight = require('inflight')
|
||||
var accessError = require('./access-error.js')
|
||||
var isFsAccessAvailable = require('./is-fs-access-available.js')
|
||||
|
||||
// Choose the existence-check implementation once at load time: fs.access
// when it is trustworthy on this platform, fs.stat otherwise.
module.exports = isFsAccessAvailable ? fsAccessImplementation : fsStatImplementation
|
||||
|
||||
// exposed only for testing purposes
|
||||
module.exports.fsAccessImplementation = fsAccessImplementation
|
||||
module.exports.fsStatImplementation = fsStatImplementation
|
||||
|
||||
// Check that `dir` exists, deduplicating concurrent checks for the same
// path via inflight. Calls back with null on success, or the fs error.
function fsAccessImplementation (dir, done) {
  done = inflight('exists:' + dir, done)
  // Another check for this path is already in flight; it will call us back.
  if (!done) return
  // `fs.F_OK` is deprecated in favor of `fs.constants.F_OK` (Node 6.3+);
  // fall back for the older runtimes this module still supports.
  fs.access(dir, (fs.constants || fs).F_OK, done)
}
|
||||
|
||||
// Existence check via fs.stat, for platforms where fs.access is
// unreliable. Errors are translated into EACCES-style errors by
// accessError (null when the path exists).
function fsStatImplementation (dir, done) {
  const callback = inflight('exists:' + dir, done)
  if (!callback) return // an identical check is already running
  fs.stat(dir, (statErr) => callback(accessError(dir, statErr)))
}
|
42
website/node_modules/npm/lib/install/flatten-tree.js
generated
vendored
Normal file
42
website/node_modules/npm/lib/install/flatten-tree.js
generated
vendored
Normal file
@@ -0,0 +1,42 @@
|
||||
'use strict'
|
||||
var validate = require('aproba')
|
||||
var moduleName = require('../utils/module-name.js')
|
||||
|
||||
module.exports = flattenTree
|
||||
module.exports.flatName = flatName
|
||||
module.exports.flatNameFromTree = flatNameFromTree
|
||||
|
||||
// Flatten a dependency tree into a map from "flat names" (slash-joined
// module-name paths, '/' for the root) to nodes. A breadth-first walk with
// a `seen` set keeps cycles from looping forever.
function flattenTree (tree) {
  validate('O', arguments)
  const seen = new Set()
  const flat = {}
  const queue = [[tree, '/']]
  while (queue.length) {
    const [node, flatname] = queue.shift()
    seen.add(node)
    flat[flatname] = node
    const prefix = flatname === '/' ? flatname : flatname + '/'
    node.children.forEach((child) => {
      if (!seen.has(child)) queue.push([child, flatName(prefix, child)])
    })
  }
  return flat
}
|
||||
|
||||
// Append `child`'s module name to `path`; an unnamed node (the root)
// contributes the placeholder 'TOP'.
function flatName (path, child) {
  validate('SO', arguments)
  const segment = moduleName(child) || 'TOP'
  return path + segment
}
|
||||
|
||||
// Compute a node's flat name by recursing up the parent chain to the root
// ('/') and appending each module name on the way back down.
function flatNameFromTree (tree) {
  validate('O', arguments)
  if (tree.isTop) return '/'
  const parentName = flatNameFromTree(tree.parent)
  return flatName(parentName === '/' ? parentName : parentName + '/', tree)
}
|
12
website/node_modules/npm/lib/install/get-requested.js
generated
vendored
Normal file
12
website/node_modules/npm/lib/install/get-requested.js
generated
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
'use strict'
|
||||
const npa = require('npm-package-arg')
|
||||
const moduleName = require('../utils/module-name.js')
|
||||
|
||||
module.exports = function (child, reqBy) {
|
||||
if (!child.requiredBy.length) return
|
||||
if (!reqBy) reqBy = child.requiredBy[0]
|
||||
const deps = reqBy.package.dependencies || {}
|
||||
const devDeps = reqBy.package.devDependencies || {}
|
||||
const name = moduleName(child)
|
||||
return npa.resolve(name, deps[name] || devDeps[name], reqBy.realpath)
|
||||
}
|
20
website/node_modules/npm/lib/install/has-modern-meta.js
generated
vendored
Normal file
20
website/node_modules/npm/lib/install/has-modern-meta.js
generated
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
'use strict'
|
||||
module.exports = hasModernMeta
|
||||
|
||||
const npa = require('npm-package-arg')
|
||||
const moduleName = require('../utils/module-name.js')
|
||||
|
||||
// Truthy when `child` is itself a symlinked module or lives anywhere
// inside one (checked by walking up the parent chain).
function isLink (child) {
  for (let node = child; node; node = node.parent) {
    if (node.isLink) return node.isLink
  }
  return false
}
|
||||
|
||||
// A node has "modern" metadata — trustworthy enough to reuse without
// refetching — when it is the top-level package, a (transitive) symlink,
// came from a bundle, carries integrity/shasum data, or resolves to a
// git specifier.
function hasModernMeta (child) {
  if (!child) return false
  const name = moduleName(child)
  const resolved = child.package._resolved && npa.resolve(name, child.package._resolved)
  const version = npa.resolve(name, child.package.version)
  if (child.isTop || isLink(child)) return true
  if (child.fromBundle || child.package._inBundle) return true
  if (child.package._integrity || child.package._shasum) return true
  return Boolean((resolved && resolved.type === 'git') || (version && version.type === 'git'))
}
|
18
website/node_modules/npm/lib/install/inflate-bundled.js
generated
vendored
Normal file
18
website/node_modules/npm/lib/install/inflate-bundled.js
generated
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
'use strict'
|
||||
|
||||
var childPath = require('../utils/child-path.js')
|
||||
var reset = require('./node.js').reset
|
||||
|
||||
module.exports = function inflateBundled (bundler, parent, children) {
|
||||
children.forEach(function (child) {
|
||||
if (child.fromBundle === bundler) return
|
||||
reset(child)
|
||||
child.fromBundle = bundler
|
||||
child.isInLink = bundler.isLink
|
||||
child.parent = parent
|
||||
child.path = childPath(parent.path, child)
|
||||
child.realpath = bundler.isLink ? child.realpath : childPath(parent.realpath, child)
|
||||
child.isLink = child.isLink || parent.isLink || parent.target
|
||||
inflateBundled(bundler, child, child.children)
|
||||
})
|
||||
}
|
233
website/node_modules/npm/lib/install/inflate-shrinkwrap.js
generated
vendored
Normal file
233
website/node_modules/npm/lib/install/inflate-shrinkwrap.js
generated
vendored
Normal file
@@ -0,0 +1,233 @@
|
||||
'use strict'
|
||||
|
||||
const BB = require('bluebird')
|
||||
|
||||
let addBundled
|
||||
const childPath = require('../utils/child-path.js')
|
||||
const createChild = require('./node.js').create
|
||||
let fetchPackageMetadata
|
||||
const inflateBundled = require('./inflate-bundled.js')
|
||||
const moduleName = require('../utils/module-name.js')
|
||||
const normalizePackageData = require('normalize-package-data')
|
||||
const npm = require('../npm.js')
|
||||
const realizeShrinkwrapSpecifier = require('./realize-shrinkwrap-specifier.js')
|
||||
const validate = require('aproba')
|
||||
const path = require('path')
|
||||
const isRegistry = require('../utils/is-registry.js')
|
||||
const hasModernMeta = require('./has-modern-meta.js')
|
||||
const ssri = require('ssri')
|
||||
const npa = require('npm-package-arg')
|
||||
|
||||
module.exports = function (tree, sw, opts, finishInflating) {
|
||||
if (!fetchPackageMetadata) {
|
||||
fetchPackageMetadata = BB.promisify(require('../fetch-package-metadata.js'))
|
||||
addBundled = BB.promisify(fetchPackageMetadata.addBundled)
|
||||
}
|
||||
if (arguments.length === 3) {
|
||||
finishInflating = opts
|
||||
opts = {}
|
||||
}
|
||||
if (!npm.config.get('shrinkwrap') || !npm.config.get('package-lock')) {
|
||||
return finishInflating()
|
||||
}
|
||||
tree.loaded = false
|
||||
tree.hasRequiresFromLock = sw.requires
|
||||
return inflateShrinkwrap(tree.path, tree, sw.dependencies, opts).then(
|
||||
() => finishInflating(),
|
||||
finishInflating
|
||||
)
|
||||
}
|
||||
|
||||
// Inflate one level of shrinkwrap dependencies under `tree`, recursing
// into each child's own dependency map. Children already on disk are
// indexed by module name so lockfile entries can be matched against them.
function inflateShrinkwrap (topPath, tree, swdeps, opts) {
  if (!swdeps) return Promise.resolve()
  if (!opts) opts = {}
  const onDisk = {}
  for (const child of tree.children) {
    onDisk[moduleName(child)] = child
  }

  // The tree is rebuilt from the lockfile; matched on-disk children are
  // re-added by inflatableChild.
  tree.children = []

  return BB.each(Object.keys(swdeps), (name) => {
    const sw = swdeps[name]
    const requested = realizeShrinkwrapSpecifier(name, sw, topPath)
    return inflatableChild(onDisk[name], name, topPath, tree, sw, requested, opts)
      .then((child) => {
        child.hasRequiresFromLock = tree.hasRequiresFromLock
        return inflateShrinkwrap(topPath, child, sw.dependencies || {})
      })
  })
}
|
||||
|
||||
// Run normalize-package-data, deliberately swallowing validation errors:
// shrinkwrap inflation must tolerate odd metadata from non-npm registries.
function normalizePackageDataNoErrors (pkg) {
  try {
    normalizePackageData(pkg)
  } catch (_err) {
    // intentionally ignored — best-effort normalization only
  }
}
|
||||
|
||||
// Backslash-escape every character outside [A-Za-z0-9_/] so `str` can be
// embedded literally inside a regular expression.
function quotemeta (str) {
  return str.replace(/[^A-Za-z_0-9/]/g, (ch) => '\\' + ch)
}
|
||||
|
||||
// If `tb` is a tarball URL pointing at the configured registry, extract
// and return the version embedded in its filename; otherwise return
// undefined. Used to turn lockfile fields that hold registry tarball URLs
// back into plain versions.
function tarballToVersion (name, tb) {
  // Escape the registry URL for regex use, tolerate http vs https, and
  // guarantee a trailing slash.
  const registry = quotemeta(npm.config.get('registry'))
    .replace(/https?:/, 'https?:')
    .replace(/([^/])$/, '$1/')
  let matchRegTarball
  if (name) {
    const nameMatch = quotemeta(name)
    matchRegTarball = new RegExp(`^${registry}${nameMatch}/-/${nameMatch}-(.*)[.]tgz$`)
  } else {
    // Name unknown: require the path segment and the filename prefix to
    // agree via a backreference (group 1 = name, group 2 = version).
    matchRegTarball = new RegExp(`^${registry}(.*)?/-/\\1-(.*)[.]tgz$`)
  }
  const match = tb.match(matchRegTarball)
  if (!match) return
  // With a known name the version is group 1; in the anonymous form it is
  // group 2.
  return match[2] || match[1]
}
|
||||
|
||||
// Produce the tree node for one shrinkwrap entry `sw` named `name`:
// reuse the matching on-disk child when it is equivalent, otherwise fake
// a child from lockfile metadata (content-addressable fetch), otherwise
// fall back to fetching full package metadata. Always resolves to the
// child after pushing it onto `tree.children`.
function inflatableChild (onDiskChild, name, topPath, tree, sw, requested, opts) {
  validate('OSSOOOO|ZSSOOOO', arguments)
  // Specifier kinds that are expected to carry an `integrity` field.
  const usesIntegrity = (
    requested.registry ||
    requested.type === 'remote' ||
    requested.type === 'file'
  )
  // Old lockfiles stored registry tarball URLs in `version`; convert them
  // back to a plain version, keeping the URL as `resolved`.
  const regTarball = tarballToVersion(name, sw.version)
  if (regTarball) {
    sw.resolved = sw.version
    sw.version = regTarball
  }
  // Same conversion for any tarball URLs inside `requires`.
  if (sw.requires) Object.keys(sw.requires).map(_ => { sw.requires[_] = tarballToVersion(_, sw.requires[_]) || sw.requires[_] })
  const modernLink = requested.type === 'directory' && !sw.from
  if (hasModernMeta(onDiskChild) && childIsEquivalent(sw, requested, onDiskChild)) {
    // The version on disk matches the shrinkwrap entry.
    if (!onDiskChild.fromShrinkwrap) onDiskChild.fromShrinkwrap = requested
    onDiskChild.package._requested = requested
    onDiskChild.package._spec = requested.rawSpec
    onDiskChild.package._where = topPath
    onDiskChild.package._optional = sw.optional
    onDiskChild.package._development = sw.dev
    onDiskChild.package._inBundle = sw.bundled
    onDiskChild.fromBundle = (sw.bundled || onDiskChild.package._inBundle) ? tree.fromBundle || tree : null
    if (!onDiskChild.package._args) onDiskChild.package._args = []
    onDiskChild.package._args.push([String(requested), topPath])
    // non-npm registries can and will return unnormalized data, plus
    // even the npm registry may have package data normalized with older
    // normalization rules. This ensures we get package data in a consistent,
    // stable format.
    normalizePackageDataNoErrors(onDiskChild.package)
    onDiskChild.swRequires = sw.requires
    tree.children.push(onDiskChild)
    return BB.resolve(onDiskChild)
  } else if ((sw.version && (sw.integrity || !usesIntegrity) && (requested.type !== 'directory' || modernLink)) || sw.bundled) {
    // The shrinkwrap entry has an integrity field. We can fake a pkg to get
    // the installer to do a content-address fetch from the cache, if possible.
    return BB.resolve(makeFakeChild(name, topPath, tree, sw, requested))
  } else {
    // It's not on disk, and we can't just look it up by address -- do a full
    // fpm/inflate bundle pass. For registry deps, this will go straight to the
    // tarball URL, as if it were a remote tarball dep.
    return fetchChild(topPath, tree, sw, requested)
  }
}
|
||||
|
||||
// Does this shrinkwrap entry's version parse as a git specifier?
function isGit (sw) {
  const parsed = npa.resolve(sw.name, sw.version)
  return (parsed && parsed.type === 'git')
}
|
||||
|
||||
// Build a synthetic tree node entirely from lockfile metadata (no disk or
// network access), so the installer can later do a content-addressed
// fetch. The node is pushed onto `tree.children` and returned.
function makeFakeChild (name, topPath, tree, sw, requested) {
  const from = sw.from || requested.raw
  // Faked package.json: underscore fields mirror what a real fetch would
  // have recorded.
  const pkg = {
    name: name,
    version: sw.version,
    _id: name + '@' + sw.version,
    // git deps keep the git spec itself as the resolution.
    _resolved: sw.resolved || (isGit(sw) && sw.version),
    _requested: requested,
    _optional: sw.optional,
    _development: sw.dev,
    _inBundle: sw.bundled,
    _integrity: sw.integrity,
    _from: from,
    _spec: requested.rawSpec,
    _where: topPath,
    _args: [[requested.toString(), topPath]],
    dependencies: sw.requires
  }

  if (!sw.bundled) {
    const bundleDependencies = Object.keys(sw.dependencies || {}).filter((d) => sw.dependencies[d].bundled)
    // NOTE(review): assigning bundleDependencies only when the filtered
    // list is EMPTY looks inverted — confirm intent against upstream npm
    // history before changing.
    if (bundleDependencies.length === 0) {
      pkg.bundleDependencies = bundleDependencies
    }
  }
  const child = createChild({
    package: pkg,
    loaded: false,
    parent: tree,
    children: [],
    fromShrinkwrap: requested,
    // Keep the raw lockfile entry so later passes know this is synthetic.
    fakeChild: sw,
    fromBundle: sw.bundled ? tree.fromBundle || tree : null,
    path: childPath(tree.path, pkg),
    // Directory deps are installed as links pointing at their source dir.
    realpath: requested.type === 'directory' ? requested.fetchSpec : childPath(tree.realpath, pkg),
    location: (tree.location === '/' ? '' : tree.location + '/') + pkg.name,
    isLink: requested.type === 'directory',
    isInLink: tree.isLink,
    swRequires: sw.requires
  })
  tree.children.push(child)
  return child
}
|
||||
|
||||
// Fetch full metadata for a lockfile entry that cannot be satisfied from
// disk or by content address, then build (and attach) a tree node for it,
// inflating any bundled dependencies it ships with.
function fetchChild (topPath, tree, sw, requested) {
  return fetchPackageMetadata(requested, topPath).then((pkg) => {
    // Stamp lockfile-derived provenance onto the fetched package data.
    pkg._from = sw.from || requested.raw
    pkg._optional = sw.optional
    pkg._development = sw.dev
    pkg._inBundle = false
    return addBundled(pkg).then(() => pkg)
  }).then((pkg) => {
    var isLink = pkg._requested.type === 'directory'
    const child = createChild({
      package: pkg,
      loaded: false,
      parent: tree,
      fromShrinkwrap: requested,
      path: childPath(tree.path, pkg),
      realpath: isLink ? requested.fetchSpec : childPath(tree.realpath, pkg),
      // addBundled parked any bundled deps on pkg._bundled.
      children: pkg._bundled || [],
      location: (tree.location === '/' ? '' : tree.location + '/') + pkg.name,
      fromBundle: null,
      isLink: isLink,
      isInLink: tree.isLink,
      swRequires: sw.requires
    })
    tree.children.push(child)
    if (pkg._bundled) {
      delete pkg._bundled
      inflateBundled(child, child, child.children)
    }
    return child
  })
}
|
||||
|
||||
// Decide whether the on-disk `child` satisfies shrinkwrap entry `sw` for
// specifier `requested`, checking in priority order: shrinkwrap
// provenance, integrity, link identity, resolved URL, non-registry
// origin, and finally plain version equality.
function childIsEquivalent (sw, requested, child) {
  if (!child) return false
  if (child.fromShrinkwrap) return true
  const diskPkg = child.package
  if (sw.integrity && diskPkg._integrity) {
    if (ssri.parse(sw.integrity).match(diskPkg._integrity)) return true
  }
  if (child.isLink && requested.type === 'directory') {
    // Links match when they point at exactly the requested directory.
    return path.relative(child.realpath, requested.fetchSpec) === ''
  }
  if (sw.resolved) return diskPkg._resolved === sw.resolved
  if (!isRegistry(requested)) {
    if (sw.from) return diskPkg._from === sw.from
    if (diskPkg._resolved) return sw.version === diskPkg._resolved
  }
  return diskPkg.version === sw.version
}
|
8
website/node_modules/npm/lib/install/is-dev-dep.js
generated
vendored
Normal file
8
website/node_modules/npm/lib/install/is-dev-dep.js
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
'use strict'
|
||||
module.exports = isDevDep
|
||||
|
||||
// Is `name` listed in `node`'s devDependencies? Returns the spec string
// (truthy) when it is, or a falsy value otherwise.
function isDevDep (node, name) {
  const pkg = node.package
  return pkg && pkg.devDependencies && pkg.devDependencies[name]
}
|
27
website/node_modules/npm/lib/install/is-extraneous.js
generated
vendored
Normal file
27
website/node_modules/npm/lib/install/is-extraneous.js
generated
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
'use strict'
|
||||
module.exports = isExtraneous
|
||||
|
||||
// A module is extraneous when nothing in the tree legitimately requires
// it (the inverse of isNotExtraneous).
function isExtraneous (tree) {
  return !isNotExtraneous(tree)
}
|
||||
|
||||
// Walk parent links to the root of the tree and report its load error
// (set when the top level had no readable package.json).
function topHasNoPjson (tree) {
  return tree.isTop ? tree.error : topHasNoPjson(tree.parent)
}
|
||||
|
||||
// True when `tree` is (transitively) required from the top level or was
// user-requested. Cycles only count as legitimate when the root had no
// package.json to judge them by.
function isNotExtraneous (tree, isCycle) {
  if (!isCycle) isCycle = {}
  if (tree.isTop || tree.userRequired) return true
  if (isCycle[tree.path]) return topHasNoPjson(tree)
  isCycle[tree.path] = true
  // Each branch gets its own view of the visited set (prototype chain)
  // so sibling paths don't contaminate each other.
  return tree.requiredBy && tree.requiredBy.some((node) =>
    isNotExtraneous(node, Object.create(isCycle))
  )
}
|
22
website/node_modules/npm/lib/install/is-fs-access-available.js
generated
vendored
Normal file
22
website/node_modules/npm/lib/install/is-fs-access-available.js
generated
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
'use strict'
|
||||
var fs = require('fs')
|
||||
var semver = require('semver')
|
||||
var isWindows = process.platform === 'win32'
|
||||
|
||||
// fs.access first appeared in node 0.12 / io.js. On non-Windows platforms
// it always works. The Windows implementation misreported access errors
// for directories (and directories are all this module ever tests) until
// io.js 1.5.0, and the fix never landed in legacy node:
// https://github.com/joyent/node/issues/25657
var available
if (!fs.access) {
  available = false
} else if (!isWindows) {
  available = true
} else {
  available = semver.gte(process.version, '1.5.0')
}
module.exports = available
|
35
website/node_modules/npm/lib/install/is-only-dev.js
generated
vendored
Normal file
35
website/node_modules/npm/lib/install/is-only-dev.js
generated
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
'use strict'
|
||||
module.exports = isOnlyDev
|
||||
|
||||
const moduleName = require('../utils/module-name.js')
|
||||
const isDevDep = require('./is-dev-dep.js')
|
||||
const isProdDep = require('./is-prod-dep.js')
|
||||
|
||||
// Returns true if the module `node` is only required direcctly as a dev
|
||||
// dependency of the top level or transitively _from_ top level dev
|
||||
// dependencies.
|
||||
// Dual mode modules (that are both dev AND prod) should return false.
|
||||
// True if `node` is reachable from the top level only through dev
// dependency edges. Dual-mode modules (both dev AND prod) report false,
// as do modules nothing requires.
function isOnlyDev (node, seen) {
  if (!seen) seen = new Set()
  const dependents = node.requiredBy
  return dependents.length > 0 && dependents.every(andIsOnlyDev(moduleName(node), seen))
}
|
||||
|
||||
// There is a known limitation with this implementation: If a dependency is
|
||||
// ONLY required by cycles that are detached from the top level then it will
|
||||
// ultimately return true.
|
||||
//
|
||||
// This is ok though: We don't allow shrinkwraps with extraneous deps and
|
||||
// these situation is caught by the extraneous checker before we get here.
|
||||
// Build a predicate answering: does dependent `req` pull in module `name`
// only via dev dependencies — directly when `req` is the top level,
// transitively otherwise?
//
// Known limitation (unchanged from the original): a dependency required
// ONLY by cycles detached from the top level reports true; the extraneous
// checker catches that case before we get here.
function andIsOnlyDev (name, seen) {
  return function (req) {
    const dev = isDevDep(req, name)
    const prod = isProdDep(req, name)
    if (req.isTop) return dev && !prod
    // Treat already-visited dependents as dev-only to break cycles.
    if (seen.has(req)) return true
    seen.add(req)
    return isOnlyDev(req, seen)
  }
}
|
19
website/node_modules/npm/lib/install/is-only-optional.js
generated
vendored
Normal file
19
website/node_modules/npm/lib/install/is-only-optional.js
generated
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
'use strict'
|
||||
module.exports = isOptional
|
||||
|
||||
const isOptDep = require('./is-opt-dep.js')
|
||||
|
||||
// True when every path from `node` up to the top level passes through an
// optional dependency edge — i.e. the module exists only because
// something optionally wanted it.
function isOptional (node, seen) {
  if (!seen) seen = new Set()
  // Hitting a node with no dependents means the top level was reached;
  // revisiting a node means a cycle. Neither makes the module optional.
  if (seen.has(node) || node.requiredBy.length === 0) {
    return false
  }
  seen.add(node)
  const swOptional = node.fromShrinkwrap && node.package._optional
  return node.requiredBy.every(function (req) {
    // Shrinkwrap-synthesized dependents trust the lockfile's flag.
    if (req.fakeChild && swOptional) return true
    return isOptDep(req, node.package.name) || isOptional(req, seen)
  })
}
|
8
website/node_modules/npm/lib/install/is-opt-dep.js
generated
vendored
Normal file
8
website/node_modules/npm/lib/install/is-opt-dep.js
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
'use strict'
|
||||
module.exports = isOptDep
|
||||
|
||||
// Is `name` listed in `node`'s optionalDependencies? Returns the spec
// string (truthy) when it is, or a falsy value otherwise.
function isOptDep (node, name) {
  const pkg = node.package
  return pkg && pkg.optionalDependencies && pkg.optionalDependencies[name]
}
|
9
website/node_modules/npm/lib/install/is-prod-dep.js
generated
vendored
Normal file
9
website/node_modules/npm/lib/install/is-prod-dep.js
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
'use strict'
|
||||
|
||||
module.exports = isProdDep
|
||||
|
||||
// Is `name` listed in `node`'s (production) dependencies? Returns the
// spec string (truthy) when it is, or a falsy value otherwise.
function isProdDep (node, name) {
  const pkg = node.package
  return pkg && pkg.dependencies && pkg.dependencies[name]
}
|
8
website/node_modules/npm/lib/install/module-staging-path.js
generated
vendored
Normal file
8
website/node_modules/npm/lib/install/module-staging-path.js
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
'use strict'
|
||||
var uniqueFilename = require('unique-filename')
|
||||
var moduleName = require('../utils/module-name.js')
|
||||
|
||||
module.exports = moduleStagingPath
|
||||
// Compute the staging directory for `pkg` under `staging`: a unique name
// derived from the module name, keyed by the package's realpath so the
// same package always stages to the same location.
function moduleStagingPath (staging, pkg) {
  return uniqueFilename(staging, moduleName(pkg), pkg.realpath)
}
|
140
website/node_modules/npm/lib/install/mutate-into-logical-tree.js
generated
vendored
Normal file
140
website/node_modules/npm/lib/install/mutate-into-logical-tree.js
generated
vendored
Normal file
@@ -0,0 +1,140 @@
|
||||
'use strict'
|
||||
var union = require('lodash.union')
|
||||
var without = require('lodash.without')
|
||||
var validate = require('aproba')
|
||||
var flattenTree = require('./flatten-tree.js')
|
||||
var isExtraneous = require('./is-extraneous.js')
|
||||
var validateAllPeerDeps = require('./deps.js').validateAllPeerDeps
|
||||
var packageId = require('../utils/package-id.js')
|
||||
var moduleName = require('../utils/module-name.js')
|
||||
var npm = require('../npm.js')
|
||||
|
||||
// Return true if tree is a part of a cycle that:
|
||||
// A) Never connects to the top of the tree
|
||||
// B) Has not not had a point in the cycle arbitrarily declared its top
|
||||
// yet.
|
||||
// Return true if `tree` is part of a cycle that (A) never connects to the
// top of the tree and (B) has not yet had one of its members arbitrarily
// declared the cycle's top.
function isDisconnectedCycle (tree, seen) {
  const visited = seen || {}
  if (tree.isTop || tree.cycleTop || tree.requiredBy.length === 0) return false
  if (visited[tree.path]) return true
  visited[tree.path] = true
  // Each branch extends the visited set via the prototype chain so sibling
  // walks stay independent.
  return tree.requiredBy.every((node) => isDisconnectedCycle(node, Object.create(visited)))
}
|
||||
|
||||
// Rewire the physical install tree (folder layout) into the logical
// dependency tree: each node becomes a child of the nodes that require
// it. Also records missing peer dependencies on each node. Mutates and
// returns `tree`.
var mutateIntoLogicalTree = module.exports = function (tree) {
  validate('O', arguments)

  // Collect unmet peer deps onto the node that declared them.
  validateAllPeerDeps(tree, function (tree, pkgname, version) {
    if (!tree.missingPeers) tree.missingPeers = {}
    tree.missingPeers[pkgname] = version
  })

  var flat = flattenTree(tree)

  // Sorted for deterministic processing order.
  Object.keys(flat).sort().forEach(function (flatname) {
    var node = flat[flatname]
    // Nodes nothing requires stay where they are physically.
    if (!(node.requiredBy && node.requiredBy.length)) return

    if (node.parent) {
      // If a node is a cycle that never reaches the root of the logical
      // tree then we'll leave it attached to the root, or else it
      // would go missing. Further we'll note that this is the node in the
      // cycle that we picked arbitrarily to be the one attached to the root.
      // others will fall
      if (isDisconnectedCycle(node)) {
        node.cycleTop = true
      // Nor do we want to disconnect non-cyclical extraneous modules from the tree.
      } else if (node.requiredBy.length) {
        // regular deps though, we do, as we're moving them into the capable
        // hands of the modules that require them.
        node.parent.children = without(node.parent.children, node)
      }
    }

    // Attach the node under every module that requires it.
    node.requiredBy.forEach(function (parentNode) {
      parentNode.children = union(parentNode.children, [node])
    })
  })
  return tree
}
|
||||
|
||||
// Convert `tree` to logical shape and translate it into the legacy
// read-installed package format. (mutateIntoLogicalTree returns its
// argument, so the calls compose.)
module.exports.asReadInstalled = function (tree) {
  return translateTree(mutateIntoLogicalTree(tree))
}
|
||||
|
||||
// Kick off translation into the read-installed format with a fresh Set
// guarding against revisiting nodes in cycles.
function translateTree (tree) {
  return translateTree_(tree, new Set())
}
|
||||
|
||||
// Recursively rewrite a logical tree node's package object into the
// legacy read-installed shape: children become entries in
// pkg.dependencies, missing/invalid/peer-missing markers are added, and
// extraneous/link/error state is stamped on. Returns the package object.
function translateTree_ (tree, seen) {
  var pkg = tree.package
  // Cycle guard: a node already visited (or already translated, detected
  // via the _dependencies backup) is returned as-is.
  if (seen.has(tree)) return pkg
  seen.add(tree)
  if (pkg._dependencies) return pkg
  // Preserve the declared deps; pkg.dependencies is rebuilt from children.
  pkg._dependencies = pkg.dependencies
  pkg.dependencies = {}
  tree.children.forEach(function (child) {
    const dep = pkg.dependencies[moduleName(child)] = translateTree_(child, seen)
    if (child.fakeChild) {
      // Shrinkwrap-synthesized children are reported as missing installs.
      dep.missing = true
      dep.optional = child.package._optional
      dep.requiredBy = child.package._spec
    }
  })

  // Mark `name` as missing: flag an existing entry as invalid, or create
  // a stub entry when there is none.
  function markMissing (name, requiredBy) {
    if (pkg.dependencies[name]) {
      if (pkg.dependencies[name].missing) return
      pkg.dependencies[name].invalid = true
      pkg.dependencies[name].realName = name
      pkg.dependencies[name].extraneous = false
    } else {
      pkg.dependencies[name] = {
        requiredBy: requiredBy,
        missing: true,
        // NOTE(review): throws if pkg.optionalDependencies is undefined —
        // presumably guaranteed by node.js's defaultTemplate; confirm.
        optional: !!pkg.optionalDependencies[name]
      }
    }
  }

  Object.keys(tree.missingDeps).forEach(function (name) {
    markMissing(name, tree.missingDeps[name])
  })
  Object.keys(tree.missingDevDeps).forEach(function (name) {
    markMissing(name, tree.missingDevDeps[name])
  })
  // The root checks itself plus its children for missing peers; non-root
  // nodes check only their children.
  var checkForMissingPeers = (tree.parent ? [] : [tree]).concat(tree.children)
  checkForMissingPeers.filter(function (child) {
    return child.missingPeers
  }).forEach(function (child) {
    Object.keys(child.missingPeers).forEach(function (pkgname) {
      var version = child.missingPeers[pkgname]
      var peerPkg = pkg.dependencies[pkgname]
      if (!peerPkg) {
        // Stub entry for a peer dep that is entirely absent.
        peerPkg = pkg.dependencies[pkgname] = {
          _id: pkgname + '@' + version,
          name: pkgname,
          version: version
        }
      }
      if (!peerPkg.peerMissing) peerPkg.peerMissing = []
      peerPkg.peerMissing.push({
        requiredBy: packageId(child),
        requires: pkgname + '@' + version
      })
    })
  })
  pkg.path = tree.path

  pkg.error = tree.error
  // Extraneous reporting is suppressed for the root, under an errored
  // root, and in global mode.
  pkg.extraneous = !tree.isTop && (!tree.parent.isTop || !tree.parent.error) && !npm.config.get('global') && isExtraneous(tree)
  if (tree.target && tree.parent && !tree.parent.target) pkg.link = tree.realpath
  return pkg
}
|
77
website/node_modules/npm/lib/install/node.js
generated
vendored
Normal file
77
website/node_modules/npm/lib/install/node.js
generated
vendored
Normal file
@@ -0,0 +1,77 @@
|
||||
'use strict'
|
||||
|
||||
// Baseline shape shared by every node in the install tree; `create`
// copies onto a node any of these keys it is missing.
var defaultTemplate = {
  // Minimal package.json stand-in used until real metadata is loaded.
  package: {
    version: '',
    dependencies: {},
    devDependencies: {},
    optionalDependencies: {}
  },
  loaded: false,
  children: [],        // nodes installed beneath this one
  requiredBy: [],      // nodes that depend on this node
  requires: [],        // nodes this node depends on
  missingDeps: {},
  missingDevDeps: {},
  phantomChildren: {},
  path: null,          // install path of the node
  realpath: null,      // path with symlinks resolved
  location: null,
  userRequired: false, // explicitly requested on the command line
  save: false,
  saveSpec: null,
  isTop: false,        // true only for the root of the tree
  fromBundle: false
}
|
||||
|
||||
// Truthiness-preserving equivalent of `node && node.isLink`: a node is
// link-ish when it is itself a symlink.
function isLink (node) {
  if (!node) return node
  return node.isLink
}
|
||||
// Truthiness-preserving equivalent of
// `node && (node.isInLink || node.isLink)`: a node counts as "in a link"
// when it is inside one or is one itself.
function isInLink (node) {
  if (!node) return node
  return node.isInLink || node.isLink
}
|
||||
|
||||
// Fill in any keys `node` is missing from `template` (default:
// defaultTemplate), recursing into plain-object values so nested defaults
// (like `package`) are merged rather than replaced. Existing values are
// never overwritten. Returns the (mutated) node.
var create = exports.create = function (node, template, isNotTop) {
  if (!template) template = defaultTemplate
  Object.keys(template).forEach(function (key) {
    // Plain objects (not arrays) are merged key-by-key via recursion;
    // everything else is copied only when absent.
    if (template[key] != null && typeof template[key] === 'object' && !(template[key] instanceof Array)) {
      if (!node[key]) node[key] = {}
      return create(node[key], template[key], true)
    }
    if (node[key] != null) return
    node[key] = template[key]
  })
  // Link flags are derived from the parent, but only on the outermost
  // call — not while recursing into nested template objects.
  if (!isNotTop) {
    // isLink is true for the symlink and everything inside it.
    // by contrast, isInLink is true for only the things inside a link
    if (node.isLink == null) node.isLink = isLink(node.parent)
    if (node.isInLink == null) node.isInLink = isInLink(node.parent)
    if (node.fromBundle == null) {
      node.fromBundle = false
    }
  }
  return node
}
|
||||
|
||||
// Public wrapper: reset `node` and its subtree with a fresh cycle guard.
exports.reset = function (node) {
  reset(node, new Set())
}
|
||||
|
||||
// Return `node` (and, recursively, its children) to a pristine
// just-created state so it can be re-linked into a new tree position.
// `seen` guards against cycles.
function reset (node, seen) {
  if (seen.has(node)) return
  seen.add(node)
  const child = create(node)

  // FIXME: cleaning up after read-package-json's mess =(
  if (child.package._id === '@') delete child.package._id

  child.isTop = false
  child.requiredBy = []
  child.requires = []
  child.missingDeps = {}
  child.missingDevDeps = {}
  child.phantomChildren = {}
  child.location = null

  for (const kid of child.children) {
    reset(kid, seen)
  }
}
|
108
website/node_modules/npm/lib/install/read-shrinkwrap.js
generated
vendored
Normal file
108
website/node_modules/npm/lib/install/read-shrinkwrap.js
generated
vendored
Normal file
@@ -0,0 +1,108 @@
|
||||
'use strict'
|
||||
|
||||
const BB = require('bluebird')
|
||||
|
||||
const fs = require('graceful-fs')
|
||||
const iferr = require('iferr')
|
||||
const inflateShrinkwrap = require('./inflate-shrinkwrap.js')
|
||||
const log = require('npmlog')
|
||||
const parseJSON = require('../utils/parse-json.js')
|
||||
const path = require('path')
|
||||
const PKGLOCK_VERSION = require('../npm.js').lockfileVersion
|
||||
|
||||
const readFileAsync = BB.promisify(fs.readFile)
|
||||
|
||||
module.exports = readShrinkwrap
|
||||
// Load the lockfile for `child` into child.package._shrinkwrap, then call
// `next(err)`. npm-shrinkwrap.json wins over package-lock.json; lockfiles
// are only consulted for the root of the tree.
function readShrinkwrap (child, next) {
  // Already loaded — complete asynchronously for consistent callback order.
  if (child.package._shrinkwrap) return process.nextTick(next)
  BB.join(
    maybeReadFile('npm-shrinkwrap.json', child),
    // Don't read non-root lockfiles
    child.isTop && maybeReadFile('package-lock.json', child),
    // NOTE(review): package.json is read into `pkgJson` but never used
    // below — possibly vestigial; confirm before removing.
    child.isTop && maybeReadFile('package.json', child),
    (shrinkwrap, lockfile, pkgJson) => {
      if (shrinkwrap && lockfile) {
        log.warn('read-shrinkwrap', 'Ignoring package-lock.json because there is already an npm-shrinkwrap.json. Please use only one of the two.')
      }
      const name = shrinkwrap ? 'npm-shrinkwrap.json' : 'package-lock.json'
      const parsed = parsePkgLock(shrinkwrap || lockfile, name)
      // Warn (but proceed) when the lockfile was written by a different
      // lockfileVersion.
      if (parsed && parsed.lockfileVersion !== PKGLOCK_VERSION) {
        log.warn('read-shrinkwrap', `This version of npm is compatible with lockfileVersion@${PKGLOCK_VERSION}, but ${name} was generated for lockfileVersion@${parsed.lockfileVersion || 0}. I'll try to do my best with it!`)
      }
      child.package._shrinkwrap = parsed
    }
  ).then(() => next(), next)
}
|
||||
|
||||
// Read `name` from the child's directory as UTF-8, resolving to null when
// the file does not exist (other errors still reject).
function maybeReadFile (name, child) {
  const file = path.join(child.path, name)
  return readFileAsync(file, 'utf8').catch({code: 'ENOENT'}, () => null)
}
|
||||
|
||||
// Read the lockfile for `child` and, when one was found, inflate it onto
// the tree before calling `next`.
module.exports.andInflate = function (child, next) {
  readShrinkwrap(child, iferr(next, function () {
    if (!child.package._shrinkwrap) return next()
    return inflateShrinkwrap(child, child.package._shrinkwrap || {}, next)
  }))
}
|
||||
|
||||
// Markers delimiting the sections of a git merge conflict (diff3 style):
const PARENT_RE = /\|{7,}/g // start of the merge-base ("parent") section
const OURS_RE = /<{7,}/g // start of the "ours" section
const THEIRS_RE = /={7,}/g // separator before the "theirs" section
const END_RE = />{7,}/g // end of the conflict block
|
||||
|
||||
module.exports._isDiff = isDiff
|
||||
// Heuristic: `str` looks like a git merge conflict when it contains all
// three conflict markers (ours, separator, end).
function isDiff (str) {
  const hasOurs = str.match(OURS_RE)
  return hasOurs && str.match(THEIRS_RE) && str.match(END_RE)
}
|
||||
|
||||
module.exports._parsePkgLock = parsePkgLock
|
||||
// Parse the text of a package-lock/shrinkwrap file. If the text is not
// valid JSON but looks like a git merge conflict, attempt to auto-resolve
// by parsing each side and merging them; otherwise rethrow the original
// parse error. Returns null for empty input.
function parsePkgLock (str, filename) {
  if (!str) { return null }
  try {
    return parseJSON(str)
  } catch (e) {
    if (!isDiff(str)) {
      throw e
    }
    // BUG FIX: these messages previously printed the literal text
    // "$(unknown)" — template-literal interpolation is `${...}`, and the
    // `filename` parameter exists precisely for these messages.
    log.warn('conflict', `A git conflict was detected in ${filename}. Attempting to auto-resolve.`)
    log.warn('conflict', 'To make this happen automatically on git rebase/merge, consider using the npm-merge-driver:')
    log.warn('conflict', '$ npx npm-merge-driver install -g')
    // Split the text into "ours"/"theirs"/"parent" variants by walking the
    // conflict markers line by line; lines outside any conflict ("top"
    // state) belong to every variant.
    const pieces = str.split(/[\n\r]+/g).reduce((acc, line) => {
      if (line.match(PARENT_RE)) acc.state = 'parent'
      else if (line.match(OURS_RE)) acc.state = 'ours'
      else if (line.match(THEIRS_RE)) acc.state = 'theirs'
      else if (line.match(END_RE)) acc.state = 'top'
      else {
        if (acc.state === 'top' || acc.state === 'ours') acc.ours += line
        if (acc.state === 'top' || acc.state === 'theirs') acc.theirs += line
        if (acc.state === 'top' || acc.state === 'parent') acc.parent += line
      }
      return acc
    }, {
      state: 'top',
      ours: '',
      theirs: '',
      parent: ''
    })
    try {
      const ours = parseJSON(pieces.ours)
      const theirs = parseJSON(pieces.theirs)
      // BUG FIX: reconcileLockfiles takes (parent, ours, theirs); the old
      // two-argument call shifted `ours` into the unused `parent` slot,
      // silently discarding it so "resolution" just kept `theirs`.
      return reconcileLockfiles(null, ours, theirs)
    } catch (_e) {
      log.error('conflict', `Automatic conflict resolution failed. Please manually resolve conflicts in ${filename} and try again.`)
      log.silly('conflict', `Error during resolution: ${_e}`)
      // Surface the ORIGINAL parse error, not the resolution failure.
      throw e
    }
  }
}
|
||||
|
||||
// Merge the two sides of a conflicted lockfile into a fresh object, with
// "theirs" winning on key collisions.
//
// BUG FIX: this previously declared an unused leading `parent` parameter,
// while the only call site passes two arguments — so `ours` received the
// "theirs" value and `theirs` was undefined, silently dropping "ours"
// from the merge. The signature now matches the call site.
function reconcileLockfiles (ours, theirs) {
  return Object.assign({}, ours, theirs)
}
|
22
website/node_modules/npm/lib/install/realize-shrinkwrap-specifier.js
generated
vendored
Normal file
22
website/node_modules/npm/lib/install/realize-shrinkwrap-specifier.js
generated
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
'use strict'
|
||||
var npa = require('npm-package-arg')
|
||||
const isRegistry = require('../utils/is-registry.js')
|
||||
|
||||
module.exports = function (name, sw, where) {
|
||||
try {
|
||||
if (sw.version && sw.integrity) {
|
||||
return npa.resolve(name, sw.version, where)
|
||||
} else if (sw.from) {
|
||||
const spec = npa(sw.from, where)
|
||||
if (isRegistry(spec) && sw.version) {
|
||||
return npa.resolve(name, sw.version, where)
|
||||
} else if (!sw.resolved) {
|
||||
return spec
|
||||
}
|
||||
}
|
||||
if (sw.resolved) {
|
||||
return npa.resolve(name, sw.resolved, where)
|
||||
}
|
||||
} catch (_) { }
|
||||
return npa.resolve(name, sw.version, where)
|
||||
}
|
31
website/node_modules/npm/lib/install/report-optional-failure.js
generated
vendored
Normal file
31
website/node_modules/npm/lib/install/report-optional-failure.js
generated
vendored
Normal file
@@ -0,0 +1,31 @@
|
||||
'use strict'
|
||||
var path = require('path')
|
||||
var moduleName = require('../utils/module-name.js')
|
||||
|
||||
module.exports = reportOptionalFailure
|
||||
|
||||
// Walk parent links up to the root of the install tree.
function top (tree) {
  let node = tree
  while (node.parent) node = node.parent
  return node
}

// Record a failed optional dependency as a non-fatal warning on the tree
// root. `what` is the dependency name when the failure belongs to a dep of
// `tree` (its version is looked up across the dep maps); otherwise the
// failure is attributed to `tree` itself. The error is annotated with an
// `optional` id ("name@version") and a root-relative `location` before
// being pushed onto the root's `warnings` array (created on demand).
function reportOptionalFailure (tree, what, error) {
  const topTree = top(tree)
  if (!topTree.warnings) topTree.warnings = []

  let id
  if (what) {
    const pkg = tree.package
    const version = (pkg.dependencies && pkg.dependencies[what]) ||
      (pkg.optionalDependencies && pkg.optionalDependencies[what]) ||
      (pkg.devDependencies && pkg.devDependencies[what])
    id = version ? what + '@' + version : what
  } else {
    id = tree._id || moduleName(tree) + (tree.package.version ? '@' + tree.package.version : '')
  }

  let location = path.relative(topTree.path, tree.path)
  if (what) location = path.join(location, 'node_modules', what)

  error.optional = id
  error.location = location
  topTree.warnings.push(error)
}
|
189
website/node_modules/npm/lib/install/save.js
generated
vendored
Normal file
189
website/node_modules/npm/lib/install/save.js
generated
vendored
Normal file
@@ -0,0 +1,189 @@
|
||||
'use strict'
|
||||
|
||||
const deepSortObject = require('../utils/deep-sort-object.js')
|
||||
const detectIndent = require('detect-indent')
|
||||
const detectNewline = require('detect-newline')
|
||||
const fs = require('graceful-fs')
|
||||
const iferr = require('iferr')
|
||||
const log = require('npmlog')
|
||||
const moduleName = require('../utils/module-name.js')
|
||||
const npm = require('../npm.js')
|
||||
const parseJSON = require('../utils/parse-json.js')
|
||||
const path = require('path')
|
||||
const stringifyPackage = require('stringify-package')
|
||||
const validate = require('aproba')
|
||||
const without = require('lodash.without')
|
||||
const writeFileAtomic = require('write-file-atomic')
|
||||
|
||||
// if the -S|--save option is specified, then write installed packages
|
||||
// as dependencies to a package.json file.
|
||||
|
||||
exports.saveRequested = function (tree, andReturn) {
|
||||
validate('OF', arguments)
|
||||
savePackageJson(tree, andWarnErrors(andSaveShrinkwrap(tree, andReturn)))
|
||||
}
|
||||
|
||||
// Continuation factory: after package.json is written (the incoming error,
// if any, has already been nulled by andWarnErrors), go on to write the
// shrinkwrap/lockfile, again demoting write errors to warnings.
function andSaveShrinkwrap (tree, andReturn) {
  validate('OF', arguments)
  return function (er) {
    validate('E', arguments)
    const done = andWarnErrors(andReturn)
    saveShrinkwrap(tree, done)
  }
}
|
||||
|
||||
// Wrap a callback so failures are non-fatal: an incoming error is logged as
// a 'saveError' warning, then replaced with null before the callback runs.
// All remaining arguments are passed through untouched.
function andWarnErrors (cb) {
  validate('F', arguments)
  return function (er) {
    if (er) log.warn('saveError', er.message)
    const args = Array.prototype.slice.call(arguments)
    args[0] = null
    cb.apply(null, args)
  }
}
|
||||
|
||||
exports.saveShrinkwrap = saveShrinkwrap

// Regenerate the shrinkwrap/lockfile for `tree`, unless lockfile writing is
// disabled via the `shrinkwrap` or `package-lock` config flags.
function saveShrinkwrap (tree, next) {
  validate('OF', arguments)
  const lockfileEnabled = npm.config.get('shrinkwrap') && npm.config.get('package-lock')
  if (!lockfileEnabled) {
    return next()
  }
  require('../shrinkwrap.js').createShrinkwrap(tree, {silent: false}, next)
}
|
||||
|
||||
// Rewrite the tree's package.json to reflect installed/removed packages:
// re-reads the file (preserving its indent and newline style), applies the
// additions from getThingsToSave and removals from getThingsToRemove,
// optionally maintains bundleDependencies, sorts the touched dep sections,
// and writes the file back only if the serialized output actually changed.
function savePackageJson (tree, next) {
  validate('OF', arguments)
  var saveBundle = npm.config.get('save-bundle')

  // each item in the tree is a top-level thing that should be saved
  // to the package.json file.
  // The relevant tree shape is { <folder>: {what:<pkg>} }
  var saveTarget = path.resolve(tree.path, 'package.json')
  // don't use readJson, because we don't want to do all the other
  // tricky npm-specific stuff that's in there.
  fs.readFile(saveTarget, 'utf8', iferr(next, function (packagejson) {
    // Capture the file's existing formatting so the rewrite is minimal-diff.
    const indent = detectIndent(packagejson).indent
    const newline = detectNewline(packagejson)
    try {
      tree.package = parseJSON(packagejson)
    } catch (ex) {
      return next(ex)
    }

    // If we're saving bundled deps, normalize the key before we start
    // (both spellings are accepted on read; only bundleDependencies is kept).
    if (saveBundle) {
      var bundle = tree.package.bundleDependencies || tree.package.bundledDependencies
      delete tree.package.bundledDependencies
      if (!Array.isArray(bundle)) bundle = []
    }

    var toSave = getThingsToSave(tree)
    var toRemove = getThingsToRemove(tree)
    // savingTo: set of dep-section names ('dependencies', 'devDependencies',
    // 'optionalDependencies') touched by this save/remove pass.
    var savingTo = {}
    toSave.forEach(function (pkg) { if (pkg.save) savingTo[pkg.save] = true })
    toRemove.forEach(function (pkg) { if (pkg.save) savingTo[pkg.save] = true })

    // Ensure every touched section exists before indexing into it below.
    Object.keys(savingTo).forEach(function (save) {
      if (!tree.package[save]) tree.package[save] = {}
    })

    log.verbose('saving', toSave)
    const types = ['dependencies', 'devDependencies', 'optionalDependencies']
    toSave.forEach(function (pkg) {
      if (pkg.save) tree.package[pkg.save][pkg.name] = pkg.spec
      // A package may only live in one dep section: delete it from the
      // others and tell the user it moved.
      const movedFrom = []
      for (let saveType of types) {
        if (
          pkg.save !== saveType &&
          tree.package[saveType] &&
          tree.package[saveType][pkg.name]
        ) {
          movedFrom.push(saveType)
          delete tree.package[saveType][pkg.name]
        }
      }
      if (movedFrom.length) {
        log.notice('save', `${pkg.name} is being moved from ${movedFrom.join(' and ')} to ${pkg.save}`)
      }
      if (saveBundle) {
        // Keep bundleDependencies a duplicate-free list.
        var ii = bundle.indexOf(pkg.name)
        if (ii === -1) bundle.push(pkg.name)
      }
    })

    toRemove.forEach(function (pkg) {
      if (pkg.save) delete tree.package[pkg.save][pkg.name]
      if (saveBundle) {
        bundle = without(bundle, pkg.name)
      }
    })

    // Sort the touched sections for stable, diff-friendly output.
    Object.keys(savingTo).forEach(function (key) {
      tree.package[key] = deepSortObject(tree.package[key])
    })
    if (saveBundle) {
      tree.package.bundleDependencies = deepSortObject(bundle)
    }

    var json = stringifyPackage(tree.package, indent, newline)
    if (json === packagejson) {
      log.verbose('shrinkwrap', 'skipping write for package.json because there were no changes.')
      next()
    } else {
      writeFileAtomic(saveTarget, json, next)
    }
  }))
}
|
||||
|
||||
exports.getSaveType = function (tree, arg) {
|
||||
if (arguments.length) validate('OO', arguments)
|
||||
var globalInstall = npm.config.get('global')
|
||||
var noSaveFlags = !npm.config.get('save') &&
|
||||
!npm.config.get('save-dev') &&
|
||||
!npm.config.get('save-prod') &&
|
||||
!npm.config.get('save-optional')
|
||||
if (globalInstall || noSaveFlags) return null
|
||||
|
||||
if (npm.config.get('save-optional')) {
|
||||
return 'optionalDependencies'
|
||||
} else if (npm.config.get('save-dev')) {
|
||||
return 'devDependencies'
|
||||
} else if (npm.config.get('save-prod')) {
|
||||
return 'dependencies'
|
||||
} else {
|
||||
if (arg) {
|
||||
var name = moduleName(arg)
|
||||
if (tree.package.optionalDependencies[name]) {
|
||||
return 'optionalDependencies'
|
||||
} else if (tree.package.devDependencies[name]) {
|
||||
return 'devDependencies'
|
||||
}
|
||||
}
|
||||
return 'dependencies'
|
||||
}
|
||||
}
|
||||
|
||||
// Collect {name, spec, save} records for every child of the tree that is
// flagged for saving into package.json.
function getThingsToSave (tree) {
  validate('O', arguments)
  const toSave = []
  for (const child of tree.children) {
    if (!child.save) continue
    toSave.push({
      name: moduleName(child),
      spec: child.saveSpec,
      save: child.save
    })
  }
  return toSave
}
|
||||
|
||||
// Collect {name, save} records for children that were removed from the
// tree; returns an empty list when nothing was removed.
function getThingsToRemove (tree) {
  validate('O', arguments)
  const removed = tree.removedChildren
  if (!removed) return []
  return removed.map((child) => ({
    name: moduleName(child),
    save: child.save
  }))
}
|
56
website/node_modules/npm/lib/install/update-package-json.js
generated
vendored
Normal file
56
website/node_modules/npm/lib/install/update-package-json.js
generated
vendored
Normal file
@@ -0,0 +1,56 @@
|
||||
'use strict'
|
||||
var path = require('path')
|
||||
var writeFileAtomic = require('write-file-atomic')
|
||||
var moduleName = require('../utils/module-name.js')
|
||||
var deepSortObject = require('../utils/deep-sort-object.js')
|
||||
var sortedObject = require('sorted-object')
|
||||
var isWindows = require('../utils/is-windows.js')
|
||||
|
||||
// package.json keys whose contents are known-safe to deep-sort; sorting
// them yields more consistent, diff-friendly output (see the sortKeys
// loop in module.exports below).
var sortKeys = [
  'dependencies', 'devDependencies', 'bundleDependencies',
  'optionalDependencies', 'keywords', 'engines', 'scripts',
  'files'
]
|
||||
|
||||
// Write `mod`'s package.json into `buildpath`, augmented with npm's
// diagnostic underscore keys (_requiredBy, _location, _phantomChildren,
// _inBundle) and with known-safe sections deep-sorted for stable output.
module.exports = function (mod, buildpath, next) {
  var pkg = sortedObject(mod.package)
  var name = moduleName(mod)
  // Add our diagnostic keys to the package.json.
  // Note that there are folks relying on these, for ex, the Visual Studio
  // Node.js addon.
  pkg._requiredBy =
    mod.requiredBy
      .map(function (req) {
        // Dev-only requirers are tagged with a '#DEV:' prefix.
        // NOTE(review): assumes req.package.dependencies is always present
        // (normalized upstream) — otherwise this check would throw; confirm.
        if (
          req.package.devDependencies &&
          req.package.devDependencies[name] &&
          !req.package.dependencies[name]
        ) {
          return '#DEV:' + req.location
        } else {
          return req.location
        }
      })
      .concat(mod.userRequired ? ['#USER'] : [])
      .sort()
  pkg._location = mod.location
  // Record versions of phantom children (deps satisfied by a parent's
  // node_modules rather than installed here), keyed by name.
  pkg._phantomChildren = {}
  Object.keys(mod.phantomChildren).sort().forEach(function (name) {
    pkg._phantomChildren[name] = mod.phantomChildren[name].package.version
  })
  pkg._inBundle = !!mod.fromBundle

  // sort keys that are known safe to sort to produce more consistent output
  sortKeys.forEach(function (key) {
    if (pkg[key] != null) pkg[key] = deepSortObject(pkg[key])
  })

  var data = JSON.stringify(sortedObject(pkg), null, 2) + '\n'

  writeFileAtomic(path.resolve(buildpath, 'package.json'), data, {
    // We really don't need this guarantee, and fsyncing here is super slow. Except on
    // Windows where there isn't a big performance difference and it prevents errors when
    // rolling back optional packages (#17671)
    fsync: isWindows
  }, next)
}
|
73
website/node_modules/npm/lib/install/validate-args.js
generated
vendored
Normal file
73
website/node_modules/npm/lib/install/validate-args.js
generated
vendored
Normal file
@@ -0,0 +1,73 @@
|
||||
'use strict'
|
||||
var validate = require('aproba')
|
||||
var asyncMap = require('slide').asyncMap
|
||||
var chain = require('slide').chain
|
||||
var npmInstallChecks = require('npm-install-checks')
|
||||
var iferr = require('iferr')
|
||||
var checkEngine = npmInstallChecks.checkEngine
|
||||
var checkPlatform = npmInstallChecks.checkPlatform
|
||||
var npm = require('../npm.js')
|
||||
|
||||
module.exports = function (idealTree, args, next) {
|
||||
validate('OAF', arguments)
|
||||
var force = npm.config.get('force')
|
||||
|
||||
asyncMap(args, function (pkg, done) {
|
||||
chain([
|
||||
[hasMinimumFields, pkg],
|
||||
[checkSelf, idealTree, pkg, force],
|
||||
[isInstallable, pkg]
|
||||
], done)
|
||||
}, next)
|
||||
}
|
||||
|
||||
// Verify a package carries the bare-minimum metadata: a non-empty name and
// a non-empty version. Calls back with an Error naming the missing field,
// or with no arguments on success.
function hasMinimumFields (pkg, cb) {
  const missingName = pkg.name == null || pkg.name === ''
  const missingVersion = pkg.version == null || pkg.version === ''
  if (missingName) {
    return cb(new Error(`Can't install ${pkg._resolved}: Missing package name`))
  } else if (missingVersion) {
    return cb(new Error(`Can't install ${pkg._resolved}: Missing package version`))
  } else {
    return cb()
  }
}
|
||||
|
||||
// Return the warnings array of the tree root (walking parent links up),
// creating it on the root if it does not exist yet.
function getWarnings (pkg) {
  let root = pkg
  while (root.parent) root = root.parent
  if (!root.warnings) root.warnings = []
  return root.warnings
}
|
||||
|
||||
// Check that a package can be installed on this node/npm/platform. Engine
// mismatches that are non-fatal are pushed onto the tree root's warnings;
// the platform check then decides the final outcome.
var isInstallable = module.exports.isInstallable = function (pkg, next) {
  const force = npm.config.get('force')
  let nodeVersion = npm.config.get('node-version')
  if (/-/.test(nodeVersion)) {
    // for the purposes of validation, if the node version is a prerelease,
    // strip that. We check and warn about this scenario over in validate-tree.
    nodeVersion = nodeVersion.replace(/-.*/, '')
  }
  const strict = npm.config.get('engine-strict')
  checkEngine(pkg, npm.version, nodeVersion, force, strict, iferr(next, function (warn) {
    if (warn) getWarnings(pkg).push(warn)
    checkPlatform(pkg, force, next)
  }))
}
|
||||
|
||||
// Refuse to install a package as a dependency of a project with the same
// name. With --force this becomes an ENOSELF warning on the idealTree and
// the install continues; without it, an ENOSELF error is passed to `next`.
function checkSelf (idealTree, pkg, force, next) {
  // Only a matching name (or a missing top-level package) falls through.
  if (idealTree.package && idealTree.package.name !== pkg.name) return next()
  if (force) {
    const warn = new Error("Wouldn't install " + pkg.name + ' as a dependency of itself, but being forced')
    warn.code = 'ENOSELF'
    idealTree.warnings.push(warn)
    next()
  } else {
    const er = new Error('Refusing to install package with name "' + pkg.name +
      '" under a package\n' +
      'also called "' + pkg.name + '". Did you name your project the same\n' +
      'as the dependency you\'re installing?\n\n' +
      'For more information, see:\n' +
      '    <https://docs.npmjs.com/cli/install#limitations-of-npms-install-algorithm>')
    er.code = 'ENOSELF'
    next(er)
  }
}
|
95
website/node_modules/npm/lib/install/validate-tree.js
generated
vendored
Normal file
95
website/node_modules/npm/lib/install/validate-tree.js
generated
vendored
Normal file
@@ -0,0 +1,95 @@
|
||||
'use strict'
|
||||
var path = require('path')
|
||||
var validate = require('aproba')
|
||||
var asyncMap = require('slide').asyncMap
|
||||
var chain = require('slide').chain
|
||||
var npmInstallChecks = require('npm-install-checks')
|
||||
var checkGit = npmInstallChecks.checkGit
|
||||
var clone = require('lodash.clonedeep')
|
||||
var normalizePackageData = require('normalize-package-data')
|
||||
var npm = require('../npm.js')
|
||||
var andFinishTracker = require('./and-finish-tracker.js')
|
||||
var flattenTree = require('./flatten-tree.js')
|
||||
var validateAllPeerDeps = require('./deps.js').validateAllPeerDeps
|
||||
var packageId = require('../utils/package-id.js')
|
||||
|
||||
// Validate the flattened ideal tree: per-module git/error checks, then
// peer-dependency validation, top-level package.json sanity checks, and a
// dev/prod duplicate-dependency check. Results are accumulated as warnings
// on the idealTree; `next` fires when the whole chain settles.
module.exports = function (idealTree, log, next) {
  validate('OOF', arguments)
  var moduleMap = flattenTree(idealTree)
  var modules = Object.keys(moduleMap).map(function (name) { return moduleMap[name] })

  chain([
    [asyncMap, modules, function (mod, done) {
      chain([
        // Falsy entries appear to be skipped by slide's chain, so checkGit
        // only runs for non-root, non-linked modules — confirm against the
        // slide docs if touching this.
        mod.parent && !mod.isLink && [checkGit, mod.realpath],
        [checkErrors, mod, idealTree]
      ], done)
    }],
    [thenValidateAllPeerDeps, idealTree],
    [thenCheckTop, idealTree],
    [thenCheckDuplicateDeps, idealTree]
  ], andFinishTracker(log, next))
}
|
||||
|
||||
// Promote a module's recorded load error to an idealTree warning. Errors on
// the global top-level directory itself (no parent, path equals the global
// root) are deliberately ignored.
function checkErrors (mod, idealTree, next) {
  if (mod.error) {
    const isGlobalTop = !mod.parent && path.resolve(npm.globalDir, '..') === mod.path
    if (!isGlobalTop) idealTree.warnings.push(mod.error)
  }
  next()
}
|
||||
|
||||
// Run peer-dependency validation over the whole tree; each unmet peer is
// recorded as an EPEERINVALID warning on the idealTree (never fatal).
function thenValidateAllPeerDeps (idealTree, next) {
  validate('OF', arguments)
  validateAllPeerDeps(idealTree, function (tree, pkgname, version) {
    const warn = new Error(`${packageId(tree)} requires a peer of ${pkgname}@${version} but none is installed. You must install peer dependencies yourself.`)
    warn.code = 'EPEERINVALID'
    idealTree.warnings.push(warn)
  })
  next()
}
|
||||
|
||||
// Sanity-check the top-level package.json: re-run normalize-package-data on
// a clone (the live package was already normalized, which suppresses some
// of its warnings) and record each complaint as an EPACKAGEJSON warning;
// also warn (ENODEPRE) when running on a prerelease node.
function thenCheckTop (idealTree, next) {
  validate('OF', arguments)
  if (idealTree.package.error) return next()

  const pushWarning = function (message, code) {
    const warnObj = new Error(message)
    warnObj.code = code
    idealTree.warnings.push(warnObj)
  }

  // FIXME: when we replace read-package-json with something less magic,
  // this should done elsewhere.
  const pkg = clone(idealTree.package)
  try {
    normalizePackageData(pkg, function (warn) {
      pushWarning(packageId(idealTree) + ' ' + warn, 'EPACKAGEJSON')
    }, false)
  } catch (er) {
    er.code = 'EPACKAGEJSON'
    idealTree.warnings.push(er)
  }

  if (/-/.test(npm.config.get('node-version'))) {
    // if this is a prerelease node…
    pushWarning('You are using a pre-release version of node and things may not work as expected', 'ENODEPRE')
  }

  next()
}
|
||||
|
||||
// Warn about packages listed as both a production and a dev dependency.
function thenCheckDuplicateDeps (idealTree, next) {
  const prodDeps = idealTree.package.dependencies || {}
  const devDeps = idealTree.package.devDependencies || {}

  for (const name in devDeps) {
    if (name in prodDeps) {
      const warnObj = new Error('The package ' + name + ' is included as both a dev and production dependency.')
      warnObj.code = 'EDUPLICATEDEP'
      idealTree.warnings.push(warnObj)
    }
  }

  next()
}
|
35
website/node_modules/npm/lib/install/writable.js
generated
vendored
Normal file
35
website/node_modules/npm/lib/install/writable.js
generated
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
'use strict'
|
||||
var path = require('path')
|
||||
var fs = require('fs')
|
||||
var inflight = require('inflight')
|
||||
var accessError = require('./access-error.js')
|
||||
var andIgnoreErrors = require('./and-ignore-errors.js')
|
||||
var isFsAccessAvailable = require('./is-fs-access-available.js')
|
||||
|
||||
// Select the writability check at load time: prefer the fs.access-based
// implementation when available, otherwise fall back to creating a probe
// file in the directory.
if (isFsAccessAvailable) {
  module.exports = fsAccessImplementation
} else {
  module.exports = fsOpenImplementation
}

// exposed only for testing purposes
module.exports.fsAccessImplementation = fsAccessImplementation
module.exports.fsOpenImplementation = fsOpenImplementation
|
||||
|
||||
// Check that `dir` is writable via fs.access. `inflight` collapses
// concurrent checks for the same directory into one fs call; `done` is
// falsy for the duplicate callers, which piggyback on the first result.
function fsAccessImplementation (dir, done) {
  done = inflight('writable:' + dir, done)
  if (!done) return
  // fs.constants.W_OK is the supported spelling; the bare fs.W_OK alias is
  // deprecated in current Node.js.
  fs.access(dir, fs.constants.W_OK, done)
}
|
||||
|
||||
// Fallback writability check for platforms without fs.access: attempt to
// create a probe file in `dir`, then close and delete it. Failure to open
// is mapped to an EACCES-style error; unlink failures are ignored.
function fsOpenImplementation (dir, done) {
  done = inflight('writable:' + dir, done)
  if (!done) return
  const probe = path.join(dir, '.npm.check.permissions')
  const closeAndCleanup = function (fd) {
    fs.close(fd, function () {
      fs.unlink(probe, andIgnoreErrors(done))
    })
  }
  fs.open(probe, 'w', function (er, fd) {
    if (er) return done(accessError(dir, er))
    closeAndCleanup(fd)
  })
}
|
Reference in New Issue
Block a user