push all website files

This commit is contained in:
Jacob Levine
2019-01-06 13:14:45 -06:00
parent d7301e26c3
commit d2d5d4c04e
15662 changed files with 2166516 additions and 0 deletions

130
website/node_modules/npm/lib/access.js generated vendored Normal file
View File

@@ -0,0 +1,130 @@
'use strict'
/* eslint-disable standard/no-callback-literal */
var resolve = require('path').resolve
var readPackageJson = require('read-package-json')
var mapToRegistry = require('./utils/map-to-registry.js')
var npm = require('./npm.js')
var output = require('./utils/output.js')
var whoami = require('./whoami')
module.exports = access

// Usage text printed by `npm access` help and on invalid invocations.
access.usage =
  'npm access public [<package>]\n' +
  'npm access restricted [<package>]\n' +
  'npm access grant <read-only|read-write> <scope:team> [<package>]\n' +
  'npm access revoke <scope:team> [<package>]\n' +
  'npm access ls-packages [<user>|<scope>|<scope:team>]\n' +
  'npm access ls-collaborators [<package> [<user>]]\n' +
  'npm access edit [<package>]'

// Subcommand names offered to shell completion.
access.subcommands = ['public', 'restricted', 'grant', 'revoke',
  'ls-packages', 'ls-collaborators', 'edit']
// Shell-completion handler: suggest subcommand names first, then the
// per-subcommand arguments. `opts.conf.argv.remain` holds the words
// typed so far; calls back with (err, suggestions[]).
access.completion = function (opts, cb) {
  var argv = opts.conf.argv.remain
  if (argv.length === 2) {
    // Only `npm access` typed so far: offer the subcommand list.
    return cb(null, access.subcommands)
  }

  switch (argv[2]) {
    case 'grant':
      if (argv.length === 3) {
        // grant's first argument is the permission level.
        return cb(null, ['read-only', 'read-write'])
      } else {
        return cb(null, [])
      }
    case 'public':
    case 'restricted':
    case 'ls-packages':
    case 'ls-collaborators':
    case 'edit':
      return cb(null, [])
    case 'revoke':
      return cb(null, [])
    default:
      return cb(new Error(argv[2] + ' not recognized'))
  }
}
// Entry point for `npm access <cmd> ...`: parse the CLI words into
// registry params, resolve the registry URI for the target package,
// then delegate to the registry client's access API. Results, when
// present, are printed as pretty JSON.
function access (args, cb) {
  var cmd = args.shift()
  var params
  return parseParams(cmd, args, function (err, p) {
    if (err) { return cb(err) }
    params = p
    return mapToRegistry(params.package, npm.config, invokeCmd)
  })

  function invokeCmd (err, uri, auth, base) {
    if (err) { return cb(err) }
    params.auth = auth
    try {
      return npm.registry.access(cmd, uri, params, function (err, data) {
        if (!err && data) {
          output(JSON.stringify(data, undefined, 2))
        }
        cb(err, data)
      })
    } catch (e) {
      // The registry client throws synchronously on bad subcommands;
      // surface that as a usage message (callback-literal is intentional,
      // see the eslint-disable at the top of the file).
      cb(e.message + '\n\nUsage:\n' + access.usage)
    }
  }
}
// Translate CLI words into the params object expected by the registry
// access API. Calls back with (err, params) where params may contain
// package, permissions, scope, team, and user fields depending on cmd.
function parseParams (cmd, args, cb) {
  // mapToRegistry will complain if package is undefined,
  // but it's not needed for ls-packages
  var params = { 'package': '' }
  if (cmd === 'grant') {
    params.permissions = args.shift()
  }
  if (['grant', 'revoke', 'ls-packages'].indexOf(cmd) !== -1) {
    // These take a <scope> or <scope:team> entity argument.
    var entity = (args.shift() || '').split(':')
    params.scope = entity[0]
    params.team = entity[1]
  }

  if (cmd === 'ls-packages') {
    if (!params.scope) {
      // No scope given: default to the logged-in user's name.
      whoami([], true, function (err, scope) {
        params.scope = scope
        cb(err, params)
      })
    } else {
      cb(null, params)
    }
  } else {
    // Everything else operates on a package, explicit or from package.json.
    getPackage(args.shift(), function (err, pkg) {
      if (err) return cb(err)
      params.package = pkg
      if (cmd === 'ls-collaborators') params.user = args.shift()
      cb(null, params)
    })
  }
}
// Resolve the package name to operate on: use `name` when provided
// (trimmed), otherwise fall back to the `name` field of the
// package.json in the current prefix. Calls back with (err, name).
function getPackage (name, cb) {
  if (name && name.trim()) {
    cb(null, name.trim())
  } else {
    readPackageJson(
      resolve(npm.prefix, 'package.json'),
      function (err, data) {
        if (err) {
          if (err.code === 'ENOENT') {
            // Give a friendlier message than the raw ENOENT.
            cb(new Error('no package name passed to command and no package.json found'))
          } else {
            cb(err)
          }
        } else {
          cb(null, data.name)
        }
      }
    )
  }
}

49
website/node_modules/npm/lib/adduser.js generated vendored Normal file
View File

@@ -0,0 +1,49 @@
module.exports = adduser

var log = require('npmlog')
var npm = require('./npm.js')
var usage = require('./utils/usage')

// crypto is optional: node can be built without ssl support.
var crypto

try {
  crypto = require('crypto')
} catch (ex) {}

adduser.usage = usage(
  'adduser',
  'npm adduser [--registry=url] [--scope=@orgname] [--auth-type=legacy] [--always-auth]'
)

// `npm adduser` / `npm login`: authenticate against the configured
// registry (honoring a scope-specific registry when --scope is set)
// using the configured auth-type module, then persist the credentials
// to the user config.
function adduser (args, cb) {
  if (!crypto) {
    return cb(new Error(
      'You must compile node with ssl support to use the adduser feature'
    ))
  }

  var registry = npm.config.get('registry')
  var scope = npm.config.get('scope')
  var creds = npm.config.getCredentialsByURI(npm.config.get('registry'))

  if (scope) {
    var scopedRegistry = npm.config.get(scope + ':registry')
    var cliRegistry = npm.config.get('registry', 'cli')
    // Prefer the scope's registry unless one was given on the command line.
    if (scopedRegistry && !cliRegistry) registry = scopedRegistry
  }

  log.disableProgress()

  // The auth module is chosen dynamically by the auth-type config
  // (legacy, oauth, saml, sso).
  try {
    var auth = require('./auth/' + npm.config.get('auth-type'))
  } catch (e) {
    return cb(new Error('no such auth module'))
  }

  auth.login(creds, registry, scope, function (err, newCreds) {
    if (err) return cb(err)

    npm.config.del('_token', 'user') // prevent legacy pollution
    if (scope) npm.config.set(scope + ':registry', registry, 'user')
    npm.config.setCredentialsByURI(registry, newCreds)
    npm.config.save('user', cb)
  })
}

273
website/node_modules/npm/lib/audit.js generated vendored Normal file
View File

@@ -0,0 +1,273 @@
'use strict'
const Bluebird = require('bluebird')
const audit = require('./install/audit.js')
const fs = require('graceful-fs')
const Installer = require('./install.js').Installer
const lockVerify = require('lock-verify')
const log = require('npmlog')
const npa = require('npm-package-arg')
const npm = require('./npm.js')
const output = require('./utils/output.js')
const parseJson = require('json-parse-better-errors')
const readFile = Bluebird.promisify(fs.readFile)
module.exports = auditCmd

const usage = require('./utils/usage')
auditCmd.usage = usage(
  'audit',
  '\nnpm audit [--json]' +
  '\nnpm audit fix ' +
  '[--force|--package-lock-only|--dry-run|--production|--only=(dev|prod)]'
)

// Shell completion: `npm audit` has no completable arguments.
auditCmd.completion = function (opts, cb) {
  const argv = opts.conf.argv.remain
  switch (argv[2]) {
    case 'audit':
      return cb(null, [])
    default:
      return cb(new Error(argv[2] + ' not recognized'))
  }
}
// Specialized Installer used by `npm audit fix`. It can force specific
// deep dependencies (`deepArgs`, arrays of path segments like
// ['a', 'b', 'c@1.2.3']) to be re-resolved to fixed versions, and it
// disables audit submission and top-level lifecycle scripts.
class Auditor extends Installer {
  constructor (where, dryrun, args, opts) {
    super(where, dryrun, args, opts)
    this.deepArgs = (opts && opts.deepArgs) || []
    this.runId = opts.runId || ''
    // Don't re-audit while installing the fixes.
    this.audit = false
  }

  // Load the ideal tree once, then for each deep-arg path walk down the
  // tree to the target node, mark it (plus its ancestors and dependents)
  // as unloaded, and load again so the target is re-fetched at the fixed
  // version.
  loadAllDepsIntoIdealTree (cb) {
    Bluebird.fromNode(cb => super.loadAllDepsIntoIdealTree(cb)).then(() => {
      if (this.deepArgs && this.deepArgs.length) {
        this.deepArgs.forEach(arg => {
          arg.reduce((acc, child, ii) => {
            if (!acc) {
              // We might not always be able to find `target` through the given
              // path. If we can't we'll just ignore it.
              return
            }
            const spec = npa(child)
            // Names may have been scrubbed before being sent to the audit
            // endpoint; match on the scrubbed name as a fallback.
            const target = (
              acc.requires.find(n => n.package.name === spec.name) ||
              acc.requires.find(
                n => audit.scrub(n.package.name, this.runId) === spec.name
              )
            )
            if (target && ii === arg.length - 1) {
              target.loaded = false
              // This kills `hasModernMeta()` and forces a re-fetch
              target.package = {
                name: spec.name,
                version: spec.fetchSpec,
                _requested: target.package._requested
              }
              delete target.fakeChild
              let parent = target.parent
              while (parent) {
                parent.loaded = false
                parent = parent.parent
              }
              target.requiredBy.forEach(par => {
                par.loaded = false
                delete par.fakeChild
              })
            }
            return target
          }, this.idealTree)
        })
        return Bluebird.fromNode(cb => super.loadAllDepsIntoIdealTree(cb))
      }
    }).nodeify(cb)
  }

  // no top level lifecycles on audit
  runPreinstallTopLevelLifecycles (cb) { cb() }
  runPostinstallTopLevelLifecycles (cb) { cb() }
}
// Read and parse a JSON file under npm.prefix. Resolves to the parsed
// object, or null when the file does not exist; a parse error is tagged
// EJSONPARSE, and any surfaced error is annotated with the file path.
function maybeReadFile (name) {
  const file = `${npm.prefix}/${name}`
  return readFile(file)
    .then((data) => {
      try {
        return parseJson(data)
      } catch (ex) {
        ex.code = 'EJSONPARSE'
        throw ex
      }
    })
    // Bluebird predicate catch: only swallow missing-file errors.
    .catch({code: 'ENOENT'}, () => null)
    .catch((ex) => {
      ex.file = file
      throw ex
    })
}
// Filter an audit action's `resolves` down to the dependency
// environments (dev/prod) selected by the current npm config
// (--dev / --production / --only / --also). Returns a copy of the
// action with the filtered resolves, or undefined when nothing remains.
function filterEnv (action) {
  const includeDev = npm.config.get('dev') ||
    (!/^prod(uction)?$/.test(npm.config.get('only')) && !npm.config.get('production')) ||
    /^dev(elopment)?$/.test(npm.config.get('only')) ||
    /^dev(elopment)?$/.test(npm.config.get('also'))
  const includeProd = !/^dev(elopment)?$/.test(npm.config.get('only'))
  const resolves = action.resolves.filter(({dev}) => {
    return (dev && includeDev) || (!dev && includeProd)
  })
  if (resolves.length) {
    return Object.assign({}, action, {resolves})
  }
}
// Entry point for `npm audit` / `npm audit fix`. Reads the lockfile and
// package.json, submits a dependency report to the registry's audit
// endpoint, then either prints the report or (for `fix`) installs and
// updates the fixable packages via the Auditor installer.
function auditCmd (args, cb) {
  if (npm.config.get('global')) {
    const err = new Error('`npm audit` does not support testing globals')
    err.code = 'EAUDITGLOBAL'
    throw err
  }
  if (args.length && args[0] !== 'fix') {
    return cb(new Error('Invalid audit subcommand: `' + args[0] + '`\n\nUsage:\n' + auditCmd.usage))
  }
  return Bluebird.all([
    maybeReadFile('npm-shrinkwrap.json'),
    maybeReadFile('package-lock.json'),
    maybeReadFile('package.json')
  ]).spread((shrinkwrap, lockfile, pkgJson) => {
    // A shrinkwrap takes precedence over a package-lock.
    const sw = shrinkwrap || lockfile
    if (!pkgJson) {
      const err = new Error('No package.json found: Cannot audit a project without a package.json')
      err.code = 'EAUDITNOPJSON'
      throw err
    }
    if (!sw) {
      const err = new Error('Neither npm-shrinkwrap.json nor package-lock.json found: Cannot audit a project without a lockfile')
      err.code = 'EAUDITNOLOCK'
      throw err
    } else if (shrinkwrap && lockfile) {
      log.warn('audit', 'Both npm-shrinkwrap.json and package-lock.json exist, using npm-shrinkwrap.json.')
    }
    const requires = Object.assign(
      {},
      (pkgJson && pkgJson.dependencies) || {},
      (pkgJson && pkgJson.devDependencies) || {}
    )
    // Refuse to audit a lockfile that is out of sync with package.json.
    return lockVerify(npm.prefix).then((result) => {
      if (result.status) return audit.generate(sw, requires)

      const lockFile = shrinkwrap ? 'npm-shrinkwrap.json' : 'package-lock.json'
      const err = new Error(`Errors were found in your ${lockFile}, run  npm install  to fix them.\n    ` +
        result.errors.join('\n    '))
      err.code = 'ELOCKVERIFY'
      throw err
    })
  }).then((auditReport) => {
    return audit.submitForFullReport(auditReport)
  }).catch((err) => {
    // Registries without an audit endpoint respond 404/5xx.
    if (err.statusCode === 404 || err.statusCode >= 500) {
      const ne = new Error(`Your configured registry (${npm.config.get('registry')}) does not support audit requests.`)
      ne.code = 'ENOAUDIT'
      ne.wrapped = err
      throw ne
    }
    throw err
  }).then((auditResult) => {
    if (args[0] === 'fix') {
      // Bucket each recommended action into install/update/major/review
      // sets; the *Fixes sets track the `id::path` vulns each bucket fixes.
      const actions = (auditResult.actions || []).reduce((acc, action) => {
        action = filterEnv(action)
        if (!action) { return acc }
        if (action.isMajor) {
          acc.major.add(`${action.module}@${action.target}`)
          action.resolves.forEach(({id, path}) => acc.majorFixes.add(`${id}::${path}`))
        } else if (action.action === 'install') {
          acc.install.add(`${action.module}@${action.target}`)
          action.resolves.forEach(({id, path}) => acc.installFixes.add(`${id}::${path}`))
        } else if (action.action === 'update') {
          const name = action.module
          const version = action.target
          action.resolves.forEach(vuln => {
            acc.updateFixes.add(`${vuln.id}::${vuln.path}`)
            const modPath = vuln.path.split('>')
            // Replace the tail of the vuln path with the fixed version;
            // a single-segment path is a direct dep -> plain install.
            const newPath = modPath.slice(
              0, modPath.indexOf(name)
            ).concat(`${name}@${version}`)
            if (newPath.length === 1) {
              acc.install.add(newPath[0])
            } else {
              acc.update.add(newPath.join('>'))
            }
          })
        } else if (action.action === 'review') {
          action.resolves.forEach(({id, path}) => acc.review.add(`${id}::${path}`))
        }
        return acc
      }, {
        install: new Set(),
        installFixes: new Set(),
        update: new Set(),
        updateFixes: new Set(),
        major: new Set(),
        majorFixes: new Set(),
        review: new Set()
      })
      return Bluebird.try(() => {
        // Major (breaking) upgrades are only applied under --force.
        const installMajor = npm.config.get('force')
        const installCount = actions.install.size + (installMajor ? actions.major.size : 0) + actions.update.size
        const vulnFixCount = new Set([...actions.installFixes, ...actions.updateFixes, ...(installMajor ? actions.majorFixes : [])]).size
        const metavuln = auditResult.metadata.vulnerabilities
        const total = Object.keys(metavuln).reduce((acc, key) => acc + metavuln[key], 0)
        if (installCount) {
          log.verbose(
            'audit',
            'installing',
            [...actions.install, ...(installMajor ? actions.major : []), ...actions.update]
          )
        }
        return Bluebird.fromNode(cb => {
          new Auditor(
            npm.prefix,
            !!npm.config.get('dry-run'),
            [...actions.install, ...(installMajor ? actions.major : [])],
            {
              runId: auditResult.runId,
              deepArgs: [...actions.update].map(u => u.split('>'))
            }
          ).run(cb)
        }).then(() => {
          const numScanned = auditResult.metadata.totalDependencies
          if (!npm.config.get('json') && !npm.config.get('parseable')) {
            output(`fixed ${vulnFixCount} of ${total} vulnerabilit${total === 1 ? 'y' : 'ies'} in ${numScanned} scanned package${numScanned === 1 ? '' : 's'}`)
            if (actions.review.size) {
              output(` ${actions.review.size} vulnerabilit${actions.review.size === 1 ? 'y' : 'ies'} required manual review and could not be updated`)
            }
            if (actions.major.size) {
              output(` ${actions.major.size} package update${actions.major.size === 1 ? '' : 's'} for ${actions.majorFixes.size} vuln${actions.majorFixes.size === 1 ? '' : 's'} involved breaking changes`)
              if (installMajor) {
                output(' (installed due to `--force` option)')
              } else {
                output(' (use `npm audit fix --force` to install breaking changes;' +
                  ' or refer to `npm audit` for steps to fix these manually)')
              }
            }
          }
        })
      })
    } else {
      // Plain `npm audit`: exit nonzero when vulns at/above --audit-level.
      const levels = ['low', 'moderate', 'high', 'critical']
      const minLevel = levels.indexOf(npm.config.get('audit-level'))
      const vulns = levels.reduce((count, level, i) => {
        return i < minLevel ? count : count + (auditResult.metadata.vulnerabilities[level] || 0)
      }, 0)
      if (vulns > 0) process.exitCode = 1
      if (npm.config.get('parseable')) {
        return audit.printParseableReport(auditResult)
      } else {
        return audit.printFullReport(auditResult)
      }
    }
  }).asCallback(cb)
}

79
website/node_modules/npm/lib/auth/legacy.js generated vendored Normal file
View File

@@ -0,0 +1,79 @@
'use strict'

const read = require('../utils/read-user-info.js')
const profile = require('npm-profile')
const log = require('npmlog')
const npm = require('../npm.js')
const output = require('../utils/output.js')
const pacoteOpts = require('../config/pacote')
const fetchOpts = require('../config/fetch-opts')
const openUrl = require('../utils/open-url')

// Promise wrapper around openUrl: opens the login URL in a browser.
const openerPromise = (url) => new Promise((resolve, reject) => {
  openUrl(url, 'to complete your login please visit', (er) => er ? reject(er) : resolve())
})

// Interactively prompt for username, password, and email, mutating the
// provided `creds` object as answers arrive and resolving with it.
const loginPrompter = (creds) => {
  const opts = { log: log }
  return read.username('Username:', creds.username, opts).then((u) => {
    creds.username = u
    return read.password('Password:', creds.password)
  }).then((p) => {
    creds.password = p
    return read.email('Email: (this IS public) ', creds.email, opts)
  }).then((e) => {
    creds.email = e
    return creds
  })
}
// Callback-style entry point used by `npm adduser --auth-type=legacy`.
// Builds the profile.login config from npm's settings and calls back
// with the new credentials on success.
module.exports.login = (creds, registry, scope, cb) => {
  const conf = {
    log: log,
    creds: creds,
    registry: registry,
    auth: {
      otp: npm.config.get('otp')
    },
    scope: scope,
    opts: fetchOpts.fromPacote(pacoteOpts())
  }
  login(conf).then((newCreds) => cb(null, newCreds)).catch(cb)
}
// The actual login flow: attempt profile.login, fall back to creating
// the user (adduserCouch) when full credentials are present, retry with
// a one-time password on EOTP, then log success and resolve with the
// credentials to persist (a token when available, basic auth otherwise).
function login (conf) {
  return profile.login(openerPromise, loginPrompter, conf)
    .catch((err) => {
      // EOTP must fall through to the OTP retry below.
      if (err.code === 'EOTP') throw err
      const u = conf.creds.username
      const p = conf.creds.password
      const e = conf.creds.email
      if (!(u && p && e)) throw err
      // Login failed but we have everything needed to register instead.
      return profile.adduserCouch(u, e, p, conf)
    })
    .catch((err) => {
      // Two-factor auth required: prompt for the OTP and retry.
      if (err.code !== 'EOTP') throw err
      return read.otp('Enter one-time password from your authenticator app: ').then((otp) => {
        conf.auth.otp = otp
        const u = conf.creds.username
        const p = conf.creds.password
        return profile.loginCouch(u, p, conf)
      })
    }).then((result) => {
      // Prefer a token; otherwise keep basic-auth credentials.
      const newCreds = {}
      if (result && result.token) {
        newCreds.token = result.token
      } else {
        newCreds.username = conf.creds.username
        newCreds.password = conf.creds.password
        newCreds.email = conf.creds.email
        newCreds.alwaysAuth = npm.config.get('always-auth')
      }

      const usermsg = conf.creds.username ? ' user ' + conf.creds.username : ''
      conf.log.info('login', 'Authorized' + usermsg)
      const scopeMessage = conf.scope ? ' to scope ' + conf.scope : ''
      const userout = conf.creds.username ? ' as ' + conf.creds.username : ''
      output('Logged in%s%s on %s.', userout, scopeMessage, conf.registry)
      return newCreds
    })
}

7
website/node_modules/npm/lib/auth/oauth.js generated vendored Normal file
View File

@@ -0,0 +1,7 @@
var ssoAuth = require('./sso')
var npm = require('../npm')

// OAuth login is the SSO flow with the sso-type forced to 'oauth'.
module.exports.login = function login () {
  npm.config.set('sso-type', 'oauth')
  ssoAuth.login.apply(this, arguments)
}

7
website/node_modules/npm/lib/auth/saml.js generated vendored Normal file
View File

@@ -0,0 +1,7 @@
var ssoAuth = require('./sso')
var npm = require('../npm')

// SAML login is the SSO flow with the sso-type forced to 'saml'.
module.exports.login = function login () {
  npm.config.set('sso-type', 'saml')
  ssoAuth.login.apply(this, arguments)
}

56
website/node_modules/npm/lib/auth/sso.js generated vendored Normal file
View File

@@ -0,0 +1,56 @@
var log = require('npmlog')
var npm = require('../npm.js')
var output = require('../utils/output')
var openUrl = require('../utils/open-url')

// Single-sign-on login: request an SSO URL + token from the registry,
// open the URL in a browser, then poll until the session is validated.
// Calls back with (err, { token }).
module.exports.login = function login (creds, registry, scope, cb) {
  var ssoType = npm.config.get('sso-type')
  if (!ssoType) { return cb(new Error('Missing option: sso-type')) }

  var params = {
    // We're reusing the legacy login endpoint, so we need some dummy
    // stuff here to pass validation. They're never used.
    auth: {
      username: 'npm_' + ssoType + '_auth_dummy_user',
      password: 'placeholder',
      email: 'support@npmjs.com',
      authType: ssoType
    }
  }
  npm.registry.adduser(registry, params, function (er, doc) {
    if (er) return cb(er)
    if (!doc || !doc.token) return cb(new Error('no SSO token returned'))
    if (!doc.sso) return cb(new Error('no SSO URL returned by services'))

    openUrl(doc.sso, 'to complete your login please visit', function () {
      pollForSession(registry, doc.token, function (err, username) {
        if (err) return cb(err)

        log.info('adduser', 'Authorized user %s', username)
        var scopeMessage = scope ? ' to scope ' + scope : ''
        output('Logged in as %s%s on %s.', username, scopeMessage, registry)
        cb(null, { token: doc.token })
      })
    })
  })
}

// Repeatedly call whoami with the SSO token until the registry reports
// a username (session validated). A 401 means "not validated yet";
// any other error aborts the poll.
function pollForSession (registry, token, cb) {
  log.info('adduser', 'Polling for validated SSO session')
  npm.registry.whoami(registry, {
    auth: {
      token: token
    }
  }, function (er, username) {
    if (er && er.statusCode !== 401) {
      cb(er)
    } else if (!username) {
      // Not validated yet: retry after sso-poll-frequency milliseconds.
      setTimeout(function () {
        pollForSession(registry, token, cb)
      }, npm.config.get('sso-poll-frequency'))
    } else {
      cb(null, username)
    }
  })
}

23
website/node_modules/npm/lib/bin.js generated vendored Normal file
View File

@@ -0,0 +1,23 @@
module.exports = bin

var npm = require('./npm.js')
var osenv = require('osenv')
var output = require('./utils/output.js')

bin.usage = 'npm bin [--global]'

// Print the directory where npm installs executables and call back with
// it. With --global, also warn on the log stream when that directory is
// not in PATH.
function bin (args, silent, cb) {
  if (typeof cb !== 'function') {
    // Called without the optional `silent` flag.
    cb = silent
    silent = false
  }
  var b = npm.bin
  var PATH = osenv.path()

  if (!silent) output(b)
  process.nextTick(cb.bind(this, null, b))

  if (npm.config.get('global') && PATH.indexOf(b) === -1) {
    npm.config.get('logstream').write('(not in PATH env variable)\n')
  }
}

31
website/node_modules/npm/lib/bugs.js generated vendored Normal file
View File

@@ -0,0 +1,31 @@
module.exports = bugs

var log = require('npmlog')
var openUrl = require('./utils/open-url')
var fetchPackageMetadata = require('./fetch-package-metadata.js')
var usage = require('./utils/usage')

bugs.usage = usage(
  'bugs',
  'npm bugs [<pkgname>]'
)

bugs.completion = function (opts, cb) {
  // FIXME: there used to be registry completion here, but it stopped making
  // sense somewhere around 50,000 packages on the registry
  cb()
}

// Open the bug-tracker URL for a package (default: the package in the
// current directory), falling back to its www.npmjs.org page when the
// metadata has no bugs field.
function bugs (args, cb) {
  var n = args.length ? args[0] : '.'
  fetchPackageMetadata(n, '.', {fullMetadata: true}, function (er, d) {
    if (er) return cb(er)

    // `bugs` may be a bare URL string or an object with a url property.
    var url = d.bugs && ((typeof d.bugs === 'string') ? d.bugs : d.bugs.url)
    if (!url) {
      url = 'https://www.npmjs.org/package/' + d.name
    }
    log.silly('bugs', 'url', url)
    openUrl(url, 'bug list available at the following URL', cb)
  })
}

140
website/node_modules/npm/lib/build.js generated vendored Normal file
View File

@@ -0,0 +1,140 @@
// npm build command
// everything about the installation after the creation of
// the .npm/{name}/{version}/package folder.
// linking the modules into the npm.root,
// resolving dependencies, etc.
// This runs AFTER install or link are completed.
var npm = require('./npm.js')
var log = require('npmlog')
var chain = require('slide').chain
var path = require('path')
var fs = require('graceful-fs')
var lifecycle = require('./utils/lifecycle.js')
var readJson = require('read-package-json')
var binLinks = require('bin-links')
var binLinksConfig = require('./config/bin-links.js')
var ini = require('ini')
var writeFile = require('write-file-atomic')
module.exports = build
build.usage = 'npm build [<folder>]'

// Memo of folders already built this run, and a sentinel value that,
// when passed as `didPre`, suppresses install/postinstall lifecycles.
build._didBuild = {}
build._noLC = {}
// `npm build [<folder>...]`: run the post-install build steps for each
// folder, serially. All arguments before the callback are optional and
// collapse right-to-left when omitted.
function build (args, global, didPre, didRB, cb) {
  if (typeof cb !== 'function') {
    cb = didRB
    didRB = false
  }
  if (typeof cb !== 'function') {
    cb = didPre
    didPre = false
  }
  if (typeof cb !== 'function') {
    cb = global
    global = npm.config.get('global')
  }

  if (!args.length) {
    // Bare `npm build` is usually a typo for `npm run-script build`.
    readJson(path.resolve(npm.localPrefix, 'package.json'), function (er, pkg) {
      if (!args.length && pkg && pkg.scripts && pkg.scripts.build) {
        log.warn('build', '`npm build` called with no arguments. Did you mean to `npm run-script build`?')
      }
      cb()
    })
  } else {
    // it'd be nice to asyncMap these, but actually, doing them
    // in parallel generally munges up the output from node-waf
    var builder = build_(global, didPre, didRB)
    chain(args.map(function (arg) {
      return function (cb) {
        builder(arg, cb)
      }
    }), cb)
  }
}
// Returns a function that builds one folder: reads its package.json,
// then chains preinstall, bin/module linking, bundle rebuilds, builtin
// config write, and the install/postinstall lifecycles as configured.
function build_ (global, didPre, didRB) {
  return function (folder, cb) {
    folder = path.resolve(folder)
    if (build._didBuild[folder]) log.info('build', 'already built', folder)
    build._didBuild[folder] = true
    log.info('build', folder)
    readJson(path.resolve(folder, 'package.json'), function (er, pkg) {
      if (er) return cb(er)
      // Falsy entries are skipped by chain().
      chain([
        !didPre && [lifecycle, pkg, 'preinstall', folder],
        [linkStuff, pkg, folder, global],
        !didRB && [rebuildBundles, pkg, folder],
        [writeBuiltinConf, pkg, folder],
        // Passing build._noLC as didPre suppresses these lifecycles.
        didPre !== build._noLC && [lifecycle, pkg, 'install', folder],
        didPre !== build._noLC && [lifecycle, pkg, 'postinstall', folder]
      ],
      cb)
    })
  }
}
var writeBuiltinConf = build.writeBuiltinConf = function (pkg, folder, cb) {
  // the builtin config is "sticky". Any time npm installs
  // itself globally, it puts its builtin config file there
  var parent = path.dirname(folder)
  var dir = npm.globalDir

  // Only applies when npm (or the canary, npmc) is being installed
  // globally into the global dir while using a builtin config.
  // Make this count for canary, too
  if ((pkg.name !== 'npm' && pkg.name !== 'npmc') ||
      !npm.config.get('global') ||
      !npm.config.usingBuiltin ||
      dir !== parent) {
    return cb()
  }
  var data = ini.stringify(npm.config.sources.builtin.data)
  writeFile(path.resolve(folder, 'npmrc'), data, cb)
}
// Link the package's executables into place via bin-links; honors the
// `bin-links=false` config to skip linking entirely.
var linkStuff = build.linkStuff = function (pkg, folder, global, cb) {
  // allow to opt out of linking binaries.
  if (npm.config.get('bin-links') === false) return cb()
  return binLinks(pkg, folder, global, binLinksConfig(pkg), cb)
}
// Re-run builds for bundled dependencies found in node_modules,
// skipping dot-folders, legacy `@`-style entries, and regular deps
// that are not explicitly bundled. No-op when rebuild-bundle is off.
function rebuildBundles (pkg, folder, cb) {
  if (!npm.config.get('rebuild-bundle')) return cb()

  var deps = Object.keys(pkg.dependencies || {})
    .concat(Object.keys(pkg.devDependencies || {}))
  var bundles = pkg.bundleDependencies || pkg.bundledDependencies || []

  fs.readdir(path.resolve(folder, 'node_modules'), function (er, files) {
    // error means no bundles
    if (er) return cb()

    log.verbose('rebuildBundles', files)
    // don't asyncMap these, because otherwise build script output
    // gets interleaved and is impossible to read
    chain(files.filter(function (file) {
      // rebuild if:
      // not a .folder, like .bin or .hooks
      return !file.match(/^[._-]/) &&
        // not some old 0.x style bundle
        file.indexOf('@') === -1 &&
        // either not a dep, or explicitly bundled
        (deps.indexOf(file) === -1 || bundles.indexOf(file) !== -1)
    }).map(function (file) {
      file = path.resolve(folder, 'node_modules', file)
      return function (cb) {
        if (build._didBuild[file]) return cb()
        log.verbose('rebuild bundle', file)
        // if file is not a package dir, then don't do it.
        fs.lstat(path.resolve(file, 'package.json'), function (er) {
          if (er) return cb()
          build_(false)(file, cb)
        })
      }
    }), cb)
  })
}

137
website/node_modules/npm/lib/cache.js generated vendored Normal file
View File

@@ -0,0 +1,137 @@
'use strict'
/* eslint-disable standard/no-callback-literal */
const BB = require('bluebird')
const assert = require('assert')
const cacache = require('cacache')
const finished = BB.promisify(require('mississippi').finished)
const log = require('npmlog')
const npa = require('npm-package-arg')
const npm = require('./npm.js')
const output = require('./utils/output.js')
const pacote = require('pacote')
const pacoteOpts = require('./config/pacote')
const path = require('path')
const rm = BB.promisify(require('./utils/gently-rm.js'))
const unbuild = BB.promisify(npm.commands.unbuild)
// Usage text shown for `npm cache` help and bad invocations.
cache.usage = 'npm cache add <tarball file>' +
  '\nnpm cache add <folder>' +
  '\nnpm cache add <tarball url>' +
  '\nnpm cache add <git url>' +
  '\nnpm cache add <name>@<version>' +
  '\nnpm cache clean' +
  '\nnpm cache verify'

// Shell completion: offer the subcommands, then nothing further.
cache.completion = function (opts, cb) {
  var argv = opts.conf.argv.remain
  if (argv.length === 2) {
    return cb(null, ['add', 'clean'])
  }

  // TODO - eventually...
  switch (argv[2]) {
    case 'clean':
    case 'add':
      return cb(null, [])
  }
}

exports = module.exports = cache
// Dispatch `npm cache <cmd>` to the clean/add/verify handlers. Each
// handler returns a Promise which is bridged back to the callback; an
// unknown cmd calls back with the usage string (callback-literal is
// intentional, see the eslint-disable at the top of the file).
function cache (args, cb) {
  const cmd = args.shift()
  let result
  switch (cmd) {
    case 'rm': case 'clear': case 'clean':
      result = clean(args)
      break
    case 'add':
      result = add(args, npm.prefix)
      break
    case 'verify': case 'check':
      result = verify()
      break
    default: return cb('Usage: ' + cache.usage)
  }
  // Guard against a handler that forgot to return a Promise.
  if (!result || !result.then) {
    throw new Error(`npm cache ${cmd} handler did not return a Promise`)
  }
  result.then(() => cb(), cb)
}
// npm cache clean [pkg]*
cache.clean = clean
// Remove the entire _cacache directory. Takes no arguments and
// requires --force; returns a Promise.
function clean (args) {
  if (!args) args = []
  if (args.length) {
    return BB.reject(new Error('npm cache clear does not accept arguments'))
  }
  const cachePath = path.join(npm.cache, '_cacache')
  if (!npm.config.get('force')) {
    return BB.reject(new Error("As of npm@5, the npm cache self-heals from corruption issues and data extracted from the cache is guaranteed to be valid. If you want to make sure everything is consistent, use 'npm cache verify' instead. On the other hand, if you're debugging an issue with the installer, you can use `npm install --cache /tmp/empty-cache` to use a temporary cache instead of nuking the actual one.\n\nIf you're sure you want to delete the entire cache, rerun this command with --force."))
  }
  // TODO - remove specific packages or package versions
  return rm(cachePath)
}
// npm cache add <tarball-url>
// npm cache add <pkg> <ver>
// npm cache add <tarball>
// npm cache add <folder>
// Programmatic add; `scrub` wipes the whole cache first. Returns a Promise.
cache.add = function (pkg, ver, where, scrub) {
  assert(typeof pkg === 'string', 'must include name of package to install')
  if (scrub) {
    return clean([]).then(() => {
      return add([pkg, ver], where)
    })
  }
  return add([pkg, ver], where)
}
// Fetch the given spec's tarball through pacote, which stores it in the
// cache as a side effect; resolves when the stream has been consumed.
// args is [name, version|null] or [spec, null].
function add (args, where) {
  var usage = 'Usage:\n' +
    ' npm cache add <tarball-url>\n' +
    ' npm cache add <pkg>@<ver>\n' +
    ' npm cache add <tarball>\n' +
    ' npm cache add <folder>\n'
  var spec
  log.silly('cache add', 'args', args)
  if (args[1] === undefined) args[1] = null
  // at this point the args length must ==2
  if (args[1] !== null) {
    spec = args[0] + '@' + args[1]
  } else if (args.length === 2) {
    spec = args[0]
  }
  log.verbose('cache add', 'spec', spec)
  if (!spec) return BB.reject(new Error(usage))
  log.silly('cache add', 'parsed spec', spec)
  // .resume() drains the stream so `finished` can resolve.
  return finished(pacote.tarball.stream(spec, pacoteOpts({where})).resume())
}
cache.verify = verify
// `npm cache verify`: run cacache's integrity check / garbage collection
// and print a summary, abbreviating the cache path with ~ under $HOME.
function verify () {
  const cache = path.join(npm.config.get('cache'), '_cacache')
  let prefix = cache
  if (prefix.indexOf(process.env.HOME) === 0) {
    prefix = '~' + prefix.substr(process.env.HOME.length)
  }
  return cacache.verify(cache).then((stats) => {
    output(`Cache verified and compressed (${prefix}):`)
    output(`Content verified: ${stats.verifiedContent} (${stats.keptSize} bytes)`)
    // Only print the optional lines when the counts are non-zero.
    stats.badContentCount && output(`Corrupted content removed: ${stats.badContentCount}`)
    stats.reclaimedCount && output(`Content garbage-collected: ${stats.reclaimedCount} (${stats.reclaimedSize} bytes)`)
    stats.missingContent && output(`Missing content: ${stats.missingContent}`)
    output(`Index entries: ${stats.totalEntries}`)
    output(`Finished in ${stats.runTime.total / 1000}s`)
  })
}
cache.unpack = unpack
// Extract pkg@ver into unpackTarget (unbuilding whatever is there
// first), offline from the cache. dmode/fmode/uid/gid are passed
// through to pacote's extraction options.
function unpack (pkg, ver, unpackTarget, dmode, fmode, uid, gid) {
  return unbuild([unpackTarget], true).then(() => {
    const opts = pacoteOpts({dmode, fmode, uid, gid, offline: true})
    return pacote.extract(npa.resolve(pkg, ver), unpackTarget, opts)
  })
}

40
website/node_modules/npm/lib/ci.js generated vendored Normal file
View File

@@ -0,0 +1,40 @@
'use strict'

const Installer = require('libcipm')
const lifecycleOpts = require('./config/lifecycle.js')
const npm = require('./npm.js')
const npmlog = require('npmlog')
const pacoteOpts = require('./config/pacote.js')

ci.usage = 'npm ci'

ci.completion = (cb) => cb(null, [])

// Adapt npm's config object to the interface libcipm expects.
Installer.CipmConfig.impl(npm.config, {
  get: npm.config.get,
  set: npm.config.set,
  toLifecycle (moreOpts) {
    return lifecycleOpts(moreOpts)
  },
  toPacote (moreOpts) {
    return pacoteOpts(moreOpts)
  }
})

module.exports = ci

// `npm ci`: clean install straight from the lockfile via libcipm, then
// report how many packages were added and how long it took.
function ci (args, cb) {
  return new Installer({
    config: npm.config,
    log: npmlog
  })
    .run()
    .then(
      (details) => {
        npmlog.disableProgress()
        console.log(`added ${details.pkgCount} packages in ${
          details.runTime / 1000
        }s`)
      }
    )
    .then(() => cb(), cb)
}

248
website/node_modules/npm/lib/completion.js generated vendored Normal file
View File

@@ -0,0 +1,248 @@
module.exports = completion

completion.usage = 'source <(npm completion)'

var npm = require('./npm.js')
var npmconf = require('./config/core.js')
var configDefs = npmconf.defs
var configTypes = configDefs.types
var shorthands = configDefs.shorthands
var nopt = require('nopt')

// All completable config names: long names (minus internal `_`-prefixed
// ones) plus their shorthands.
var configNames = Object.keys(configTypes)
  .filter(function (e) { return e.charAt(0) !== '_' })
var shorthandNames = Object.keys(shorthands)
var allConfs = configNames.concat(shorthandNames)
var once = require('once')
var isWindowsShell = require('./utils/is-windows-shell.js')
var output = require('./utils/output.js')
// Completion for `npm completion` itself: suggest the shell rc files
// that exist in $HOME (with `>>` redirection prefixed at word 2).
completion.completion = function (opts, cb) {
  if (opts.w > 3) return cb()

  var fs = require('graceful-fs')
  var path = require('path')
  var bashExists = null
  var zshExists = null
  fs.stat(path.resolve(process.env.HOME, '.bashrc'), function (er) {
    bashExists = !er
    next()
  })
  fs.stat(path.resolve(process.env.HOME, '.zshrc'), function (er) {
    zshExists = !er
    next()
  })
  function next () {
    // Wait until both stat calls have reported back.
    if (zshExists === null || bashExists === null) return
    var out = []
    if (zshExists) out.push('~/.zshrc')
    if (bashExists) out.push('~/.bashrc')
    if (opts.w === 2) {
      out = out.map(function (m) {
        return ['>>', m]
      })
    }
    cb(null, out)
  }
}
// Main completion handler, invoked by the installed shell completion
// script. Without the COMP_* environment variables it just dumps the
// completion script; otherwise it computes suggestions for the word at
// the cursor and hands off to config or per-command completers.
function completion (args, cb) {
  if (isWindowsShell) {
    var e = new Error('npm completion supported only in MINGW / Git bash on Windows')
    e.code = 'ENOTSUP'
    e.errno = require('constants').ENOTSUP // eslint-disable-line node/no-deprecated-api
    return cb(e)
  }

  // if the COMP_* isn't in the env, then just dump the script.
  if (process.env.COMP_CWORD === undefined ||
      process.env.COMP_LINE === undefined ||
      process.env.COMP_POINT === undefined) {
    return dumpScript(cb)
  }

  // Debug output goes to stderr so it never pollutes the completions.
  console.error(process.env.COMP_CWORD)
  console.error(process.env.COMP_LINE)
  console.error(process.env.COMP_POINT)

  // get the partial line and partial word,
  // if the point isn't at the end.
  // ie, tabbing at: npm foo b|ar
  var w = +process.env.COMP_CWORD
  var words = args.map(unescape)
  var word = words[w]
  var line = process.env.COMP_LINE
  var point = +process.env.COMP_POINT
  var partialLine = line.substr(0, point)
  var partialWords = words.slice(0, w)

  // figure out where in that last word the point is.
  var partialWord = args[w]
  var i = partialWord.length
  while (partialWord.substr(0, i) !== partialLine.substr(-1 * i) && i > 0) {
    i--
  }
  partialWord = unescape(partialWord.substr(0, i))
  partialWords.push(partialWord)

  var opts = {
    words: words,
    w: w,
    word: word,
    line: line,
    lineLength: line.length,
    point: point,
    partialLine: partialLine,
    partialWords: partialWords,
    partialWord: partialWord,
    raw: args
  }

  cb = wrapCb(cb, opts)

  console.error(opts)

  if (partialWords.slice(0, -1).indexOf('--') === -1) {
    // Completing a flag name, or the value of a preceding flag.
    if (word.charAt(0) === '-') return configCompl(opts, cb)
    if (words[w - 1] &&
        words[w - 1].charAt(0) === '-' &&
        !isFlag(words[w - 1])) {
      // awaiting a value for a non-bool config.
      // don't even try to do this for now
      console.error('configValueCompl')
      return configValueCompl(opts, cb)
    }
  }

  // try to find the npm command.
  // it's the first thing after all the configs.
  // take a little shortcut and use npm's arg parsing logic.
  // don't have to worry about the last arg being implicitly
  // boolean'ed, since the last block will catch that.
  var parsed = opts.conf =
    nopt(configTypes, shorthands, partialWords.slice(0, -1), 0)
  // check if there's a command already.
  console.error(parsed)
  var cmd = parsed.argv.remain[1]
  if (!cmd) return cmdCompl(opts, cb)

  Object.keys(parsed).forEach(function (k) {
    npm.config.set(k, parsed[k])
  })

  // at this point, if words[1] is some kind of npm command,
  // then complete on it.
  // otherwise, do nothing
  cmd = npm.commands[cmd]
  if (cmd && cmd.completion) return cmd.completion(opts, cb)

  // nothing to do.
  cb()
}
// Print the bash completion script (utils/completion.sh) to stdout, for
// `. <(npm completion)` style sourcing. Calls cb exactly once when the
// write has finished or failed.
function dumpScript (cb) {
  var fs = require('graceful-fs')
  var path = require('path')
  var p = path.resolve(__dirname, 'utils/completion.sh')
  // The Darwin patch below results in callbacks first for the write and then
  // for the error handler, so make sure we only call our callback once.
  cb = once(cb)
  fs.readFile(p, 'utf8', function (er, d) {
    if (er) return cb(er)
    // strip the shebang line; the script is meant to be sourced, not executed
    d = d.replace(/^#!.*?\n/, '')
    process.stdout.write(d, function () { cb() })
    process.stdout.on('error', function (er) {
      // Darwin is a pain sometimes.
      //
      // This is necessary because the "source" or "." program in
      // bash on OS X closes its file argument before reading
      // from it, meaning that you get exactly 1 write, which will
      // work most of the time, and will always raise an EPIPE.
      //
      // Really, one should not be tossing away EPIPE errors, or any
      // errors, so casually. But, without this, `. <(npm completion)`
      // can never ever work on OS X.
      if (er.errno === 'EPIPE') er = null
      cb(er)
    })
  })
}
// Undo shell-style quoting on a completion word: a word wrapped in single
// quotes loses the surrounding quotes; otherwise backslash-escaped spaces
// become literal spaces.
function unescape (w) {
  var singleQuoted = w.charAt(0) === '\''
  if (singleQuoted) {
    return w.replace(/^'|'$/g, '')
  }
  return w.replace(/\\ /g, ' ')
}
// Quote a completion word for the shell: words containing whitespace are
// wrapped in single quotes, everything else passes through untouched.
function escape (w) {
  var needsQuoting = /\s+/.test(w)
  return needsQuoting ? '\'' + w + '\'' : w
}
// Wrap the outer callback so that completion results get normalized and
// printed. The command responds with an array; each entry is shell-quoted
// (nested arrays are joined with a space first, so ['a', 'b c', ['d', 'e']]
// can expand to 'a', 'b c', or 'd' 'e'), filtered down to entries matching
// the partial word, and written to stdout one per line.
function wrapCb (cb, opts) {
  return function (er, compls) {
    var list
    if (Array.isArray(compls)) {
      list = compls
    } else {
      list = compls ? [compls] : []
    }
    list = list.map(function (entry) {
      return Array.isArray(entry) ? entry.map(escape).join(' ') : escape(entry)
    })
    if (opts.partialWord) {
      // only offer completions that extend what the user already typed
      list = list.filter(function (entry) {
        return entry.indexOf(opts.partialWord) === 0
      })
    }
    console.error([er && er.stack, list, opts.partialWord])
    if (er || list.length === 0) return cb(er)
    output(list.join('\n'))
    cb()
  }
}
// The current word starts with a dash: complete config names, keeping
// however many dashes the user already typed, and additionally offer the
// "no-"-negated form of every boolean flag.
function configCompl (opts, cb) {
  var parts = opts.word.match(/^(-+)((?:no-)*)(.*)$/)
  var dashes = parts[1]
  var no = parts[2]
  var flags = configNames.filter(isFlag)
  console.error(flags)
  var plain = allConfs.map(function (name) {
    return dashes + name
  })
  var negated = flags.map(function (flag) {
    return dashes + (no || 'no-') + flag
  })
  return cb(null, plain.concat(negated))
}
// Would expand to the valid values of the config key being assigned.
// Not implemented yet, so it always completes to nothing.
function configValueCompl (opts, cb) {
  console.error('configValue', opts)
  var none = []
  return cb(null, none)
}
// True-ish if `word` names a config that takes no argument: any name with
// a "no-" prefix, any Boolean-typed config, or any shorthand (shorthands
// never take args).
function isFlag (word) {
  var parts = word.match(/^(-*)((?:no-)+)?(.*)$/)
  var negation = parts[2]
  var name = parts[3]
  if (negation) return negation
  if (configTypes[name] === Boolean) return true
  return shorthands[name]
}
// Complete against the full list of npm commands, including aliases.
function cmdCompl (opts, cb) {
  var commands = npm.fullList
  return cb(null, commands)
}

286
website/node_modules/npm/lib/config.js generated vendored Normal file
View File

@@ -0,0 +1,286 @@
/* eslint-disable standard/no-callback-literal */
module.exports = config
var log = require('npmlog')
var npm = require('./npm.js')
var npmconf = require('./config/core.js')
var fs = require('graceful-fs')
var writeFileAtomic = require('write-file-atomic')
var types = npmconf.defs.types
var ini = require('ini')
var editor = require('editor')
var os = require('os')
var path = require('path')
var mkdirp = require('mkdirp')
var umask = require('./utils/umask')
var usage = require('./utils/usage')
var output = require('./utils/output')
var noProgressTillDone = require('./utils/no-progress-while-running').tillDone
// Usage text for `npm config`; also covers the `npm set` / `npm get`
// top-level aliases.
config.usage = usage(
  'config',
  'npm config set <key> <value>' +
  '\nnpm config get [<key>]' +
  '\nnpm config delete <key>' +
  '\nnpm config list [--json]' +
  '\nnpm config edit' +
  '\nnpm set <key> <value>' +
  '\nnpm get [<key>]'
)
// Tab-completion for `npm config <action> [<key> ...]`: first complete
// the sub-command, then config key names for the actions that take a key.
config.completion = function (opts, cb) {
  var argv = opts.conf.argv.remain
  if (argv[1] !== 'config') argv.unshift('config')
  if (argv.length === 2) {
    var actions = ['get', 'set', 'delete', 'ls', 'rm', 'edit']
    // omit 'list' when the partial word is exactly 'l'
    if (opts.partialWord !== 'l') actions.push('list')
    return cb(null, actions)
  }
  var action = argv[2]
  // `set` completes its key argument only; values are not completable yet.
  if (action === 'set' && argv.length > 3) return cb(null, [])
  var wantsKey = action === 'set' || action === 'get' ||
    action === 'delete' || action === 'rm'
  if (wantsKey) {
    return cb(null, Object.keys(types))
  }
  // 'edit', 'list', 'ls', and anything unknown: nothing to complete.
  return cb(null, [])
}
// Entry point for the `npm config` command:
//   npm config set key value
//   npm config get key
//   npm config list
function config (args, cb) {
  var action = args.shift()
  if (action === 'set') return set(args[0], args[1], cb)
  if (action === 'get') return get(args[0], cb)
  if (action === 'delete' || action === 'rm' || action === 'del') {
    return del(args[0], cb)
  }
  if (action === 'list' || action === 'ls') {
    return npm.config.get('json') ? listJson(cb) : list(cb)
  }
  if (action === 'edit') return edit(cb)
  return unknown(action, cb)
}
// Open the user (or, with --global, the global) config file in the
// configured editor, after appending a commented dump of every default
// value so the user can see what is available.
function edit (cb) {
  var e = npm.config.get('editor')
  var which = npm.config.get('global') ? 'global' : 'user'
  var f = npm.config.get(which + 'config')
  if (!e) return cb(new Error('No EDITOR config or environ set.'))
  // Flush any pending changes for this scope so the file on disk is current.
  npm.config.save(which, function (er) {
    if (er) return cb(er)
    fs.readFile(f, 'utf8', function (er, data) {
      // A missing/unreadable file just means we start from empty.
      if (er) data = ''
      data = [
        ';;;;',
        '; npm ' + (npm.config.get('global')
          ? 'globalconfig' : 'userconfig') + ' file',
        '; this is a simple ini-formatted file',
        '; lines that start with semi-colons are comments.',
        '; read `npm help config` for help on the various options',
        ';;;;',
        '',
        data
      ].concat([
        ';;;;',
        '; all options with default values',
        ';;;;'
      ]).concat(Object.keys(npmconf.defaults).reduce(function (arr, key) {
        var obj = {}
        obj[key] = npmconf.defaults[key]
        // logstream is a Stream and cannot be meaningfully ini-serialized
        if (key === 'logstream') return arr
        // render each default as a ';'-commented ini fragment
        return arr.concat(
          ini.stringify(obj)
            .replace(/\n$/m, '')
            .replace(/^/g, '; ')
            .replace(/\n/g, '\n; ')
            .split('\n'))
      }, []))
        .concat([''])
        .join(os.EOL)
      mkdirp(path.dirname(f), function (er) {
        if (er) return cb(er)
        writeFileAtomic(
          f,
          data,
          function (er) {
            if (er) return cb(er)
            editor(f, { editor: e }, noProgressTillDone(cb))
          }
        )
      })
    })
  })
}
// Remove `key` from the user (or, with --global, the global) config file
// and persist the change.
function del (key, cb) {
  if (!key) return cb(new Error('no key provided'))
  var scope = npm.config.get('global') ? 'global' : 'user'
  npm.config.del(key, scope)
  npm.config.save(scope, cb)
}
// Store `key` = `val` in the user/global config and save it. Accepts both
// `npm config set key value` and the single-argument `key=value` spelling.
function set (key, val, cb) {
  if (key === undefined) return unknown('', cb)
  if (val === undefined) {
    // single-argument form: split on the first '=' only
    var eq = key.indexOf('=')
    if (eq !== -1) {
      val = key.slice(eq + 1)
      key = key.slice(0, eq)
    } else {
      val = ''
    }
  }
  key = key.trim()
  val = val.trim()
  log.info('config', 'set %j %j', key, val)
  var where = npm.config.get('global') ? 'global' : 'user'
  // umask values arrive as octal strings and must be parsed numerically
  if (key.match(/umask/)) val = umask.fromString(val)
  npm.config.set(key, val, where)
  npm.config.save(where, cb)
}
// Print the effective value of a single key; with no key, fall back to
// the full listing. Secret-looking keys are refused.
function get (key, cb) {
  if (!key) return list(cb)
  if (!publicVar(key)) {
    return cb(new Error('---sekretz---'))
  }
  var val = npm.config.get(key)
  // umask is held as a number; display it in its octal-string form
  if (key.match(/umask/)) val = umask.toString(val)
  output(val)
  cb()
}
// Comparator for config key names.
// Fix: the previous version returned -1 for equal inputs, which violates
// the Array#sort comparator contract (cmp(a, a) must be 0 and the
// comparator must be consistent/antisymmetric).
function sort (a, b) {
  if (a === b) return 0
  return a > b ? 1 : -1
}
// A config key is "public" unless it looks like a secret: keys beginning
// with '_' or containing ':_' (e.g. registry-scoped _authToken) are hidden.
function publicVar (k) {
  if (k.charAt(0) === '_') return false
  return k.indexOf(':_') === -1
}
// The visible (non-secret) keys of one config source, sorted by name.
function getKeys (data) {
  var visible = Object.keys(data).filter(publicVar)
  return visible.sort(sort)
}
// Print the effective public config as pretty JSON (`npm config ls --json`).
function listJson (cb) {
  const publicConf = {}
  npm.config.keys.forEach((k) => {
    var value = npm.config.get(k)
    // argv is command state, not config; logstream is a Stream and would
    // produce circular references under JSON.stringify.
    if (publicVar(k) && k !== 'argv' && k !== 'logstream') {
      publicConf[k] = value
    }
  })
  output(JSON.stringify(publicConf, null, 2))
  return cb()
}
// Render one config source (env, project file, user file, ...) as an
// ini-style text section. Keys whose value is overridden by a
// higher-priority source are omitted unless `long` is set, in which case
// they are shown commented out.
function listFromSource (title, conf, long) {
  var keys = getKeys(conf)
  if (!keys.length) return ''
  var lines = ['; ' + title + '\n']
  keys.forEach(function (k) {
    var val = JSON.stringify(conf[k])
    var overridden = conf[k] !== npm.config.get(k)
    if (!overridden) {
      lines.push(k + ' = ' + val + '\n')
    } else if (long) {
      lines.push('; ' + k + ' = ' + val + ' (overridden)\n')
    }
  })
  lines.push('\n')
  return lines.join('')
}
// Print every config source in priority order (cli, env, project, user,
// global, builtin), then either a short footer or — with --long — the
// full set of default values.
function list (cb) {
  var msg = ''
  var long = npm.config.get('long')
  // cli configs are rendered inline rather than via listFromSource
  var cli = npm.config.sources.cli.data
  var cliKeys = getKeys(cli)
  if (cliKeys.length) {
    msg += '; cli configs\n'
    cliKeys.forEach(function (k) {
      // skip objects (not representable on one line) and argv
      if (cli[k] && typeof cli[k] === 'object') return
      if (k === 'argv') return
      msg += k + ' = ' + JSON.stringify(cli[k]) + '\n'
    })
    msg += '\n'
  }
  // env configs
  msg += listFromSource('environment configs', npm.config.sources.env.data, long)
  // project config file
  var project = npm.config.sources.project
  msg += listFromSource('project config ' + project.path, project.data, long)
  // user config file
  msg += listFromSource('userconfig ' + npm.config.get('userconfig'), npm.config.sources.user.data, long)
  // global config file
  msg += listFromSource('globalconfig ' + npm.config.get('globalconfig'), npm.config.sources.global.data, long)
  // builtin config file
  var builtin = npm.config.sources.builtin || {}
  if (builtin && builtin.data) {
    msg += listFromSource('builtin config ' + builtin.path, builtin.data, long)
  }
  // only show defaults if --long
  if (!long) {
    msg += '; node bin location = ' + process.execPath + '\n' +
      '; cwd = ' + process.cwd() + '\n' +
      '; HOME = ' + process.env.HOME + '\n' +
      '; "npm config ls -l" to show all defaults.\n'
    output(msg)
    return cb()
  }
  var defaults = npmconf.defaults
  var defKeys = getKeys(defaults)
  msg += '; default values\n'
  defKeys.forEach(function (k) {
    if (defaults[k] && typeof defaults[k] === 'object') return
    var val = JSON.stringify(defaults[k])
    if (defaults[k] !== npm.config.get(k)) {
      msg += '; ' + k + ' = ' + val + ' (overridden)\n'
    } else msg += k + ' = ' + val + '\n'
  })
  msg += '\n'
  output(msg)
  return cb()
}
// Fallback for unrecognized actions: fail with the usage message. Note
// the callback is deliberately invoked with a plain string (see the
// no-callback-literal eslint exemption at the top of the file).
function unknown (action, cb) {
  var message = 'Usage:\n' + config.usage
  cb(message)
}

32
website/node_modules/npm/lib/config/bin-links.js generated vendored Normal file
View File

@@ -0,0 +1,32 @@
'use strict'
const npm = require('../npm.js')
var packageId = require('../utils/package-id.js')
const log = require('npmlog')
module.exports = binLinksOpts
// Assemble the options bag for bin-link operations on `pkg`, drawn from
// the live npm config and directory layout.
function binLinksOpts (pkg) {
  return {
    ignoreScripts: npm.config.get('ignore-scripts'),
    force: npm.config.get('force'),
    globalBin: npm.globalBin,
    globalDir: npm.globalDir,
    json: npm.config.get('json'),
    log: log,
    name: 'npm',
    parseable: npm.config.get('parseable'),
    pkgId: packageId(pkg),
    prefix: npm.config.get('prefix'),
    // NOTE(review): presumably the set of directory roots the consumer
    // treats as valid link locations — confirm against the bin-links
    // package documentation.
    prefixes: [
      npm.prefix,
      npm.globalPrefix,
      npm.dir,
      npm.root,
      npm.globalDir,
      npm.bin,
      npm.globalBin
    ],
    umask: npm.config.get('umask')
  }
}

View File

@@ -0,0 +1,16 @@
var assert = require('assert')
var toNerfDart = require('./nerf-dart.js')
module.exports = clearCredentialsByURI
// Remove every stored credential field for the registry at `uri` from the
// user config. `this` is a Conf instance.
function clearCredentialsByURI (uri) {
  assert(uri && typeof uri === 'string', 'registry URL is required')
  var nerfed = toNerfDart(uri)
  var fields = [':_authToken', ':_password', ':username', ':email']
  var self = this
  fields.forEach(function (field) {
    self.del(nerfed + field, 'user')
  })
}

132
website/node_modules/npm/lib/config/cmd-list.js generated vendored Normal file
View File

@@ -0,0 +1,132 @@
// Alias tables and the canonical command list consumed by the CLI
// dispatcher. Key order is preserved as-is; aliases are merged with
// shorthands winning only where affordances repeat a key.
// short names for common things
var shorthands = {
  'un': 'uninstall',
  'rb': 'rebuild',
  'list': 'ls',
  'ln': 'link',
  'create': 'init',
  'i': 'install',
  'it': 'install-test',
  'cit': 'install-ci-test',
  'up': 'update',
  'c': 'config',
  's': 'search',
  'se': 'search',
  'unstar': 'star', // same function
  'tst': 'test',
  't': 'test',
  'ddp': 'dedupe',
  'v': 'view',
  'run': 'run-script',
  'clean-install': 'ci',
  'clean-install-test': 'cit'
}
// common misspellings and alternate names, mapped onto real commands
var affordances = {
  'la': 'ls',
  'll': 'ls',
  'verison': 'version',
  'ic': 'ci',
  'innit': 'init',
  'isntall': 'install',
  'install-clean': 'ci',
  'isntall-clean': 'ci',
  'dist-tags': 'dist-tag',
  'apihelp': 'help',
  'find-dupes': 'dedupe',
  'upgrade': 'update',
  'udpate': 'update',
  'login': 'adduser',
  'add-user': 'adduser',
  'author': 'owner',
  'home': 'docs',
  'issues': 'bugs',
  'info': 'view',
  'show': 'view',
  'find': 'search',
  'add': 'install',
  'unlink': 'uninstall',
  'remove': 'uninstall',
  'rm': 'uninstall',
  'r': 'uninstall',
  'rum': 'run-script',
  'sit': 'cit'
}
// these are filenames in .
var cmdList = [
  'ci',
  'install-ci-test',
  'install',
  'install-test',
  'uninstall',
  'cache',
  'config',
  'set',
  'get',
  'update',
  'outdated',
  'prune',
  'pack',
  'dedupe',
  'hook',
  'rebuild',
  'link',
  'publish',
  'star',
  'stars',
  'adduser',
  'login', // This is an alias for `adduser` but it can be confusing
  'logout',
  'unpublish',
  'owner',
  'access',
  'team',
  'deprecate',
  'shrinkwrap',
  'token',
  'profile',
  'audit',
  'help',
  'help-search',
  'ls',
  'search',
  'view',
  'init',
  'version',
  'edit',
  'explore',
  'docs',
  'repo',
  'bugs',
  'root',
  'prefix',
  'bin',
  'whoami',
  'dist-tag',
  'ping',
  'test',
  'stop',
  'start',
  'restart',
  'run-script',
  'completion',
  'doctor'
]
// commands kept out of the main list (NOTE(review): xmas/substack/visnup
// appear to be easter eggs; build/unbuild are internal plumbing)
var plumbing = [
  'build',
  'unbuild',
  'xmas',
  'substack',
  'visnup'
]
module.exports.aliases = Object.assign({}, shorthands, affordances)
module.exports.shorthands = shorthands
module.exports.affordances = affordances
module.exports.cmdList = cmdList
module.exports.plumbing = plumbing

429
website/node_modules/npm/lib/config/core.js generated vendored Normal file
View File

@@ -0,0 +1,429 @@
var CC = require('config-chain').ConfigChain
var inherits = require('inherits')
var configDefs = require('./defaults.js')
var types = configDefs.types
var once = require('once')
var fs = require('fs')
var path = require('path')
var nopt = require('nopt')
var ini = require('ini')
var Umask = configDefs.Umask
var mkdirp = require('mkdirp')
var umask = require('../utils/umask')
var isWindows = require('../utils/is-windows.js')
exports.load = load
exports.Conf = Conf
exports.loaded = false
exports.rootConf = null
exports.usingBuiltin = false
exports.defs = configDefs
// Live views onto the defaults module, exposed as getters so that later
// mutations of configDefs remain visible through these properties.
Object.defineProperty(exports, 'defaults', { get: function () {
  return configDefs.defaults
},
enumerable: true })
Object.defineProperty(exports, 'types', { get: function () {
  return configDefs.types
},
enumerable: true })
exports.validate = validate
// Under sudo, prefer the invoking user's uid/gid (SUDO_UID/SUDO_GID) so
// that the saved user config file is chowned back to that user (see
// Conf.prototype.save).
var myUid = process.env.SUDO_UID !== undefined
  ? process.env.SUDO_UID : (process.getuid && process.getuid())
var myGid = process.env.SUDO_GID !== undefined
  ? process.env.SUDO_GID : (process.getgid && process.getgid())
// In-flight load state: callbacks queued while the first load() completes.
var loading = false
var loadCbs = []
// Load the npm configuration. Arguments are detected by type: any string
// is the builtin config path, any object is CLI config, any function is
// the callback. Concurrent calls share one load; the result is cached on
// exports.loaded.
function load () {
  var cli, builtin, cb
  for (var i = 0; i < arguments.length; i++) {
    switch (typeof arguments[i]) {
      case 'string': builtin = arguments[i]; break
      case 'object': cli = arguments[i]; break
      case 'function': cb = arguments[i]; break
    }
  }
  if (!cb) cb = function () {}
  if (exports.loaded) {
    // Already loaded: reuse the cached Conf, layering any fresh CLI
    // config on top of a child chain so the cache itself stays clean.
    var ret = exports.loaded
    if (cli) {
      ret = new Conf(ret)
      ret.unshift(cli)
    }
    return process.nextTick(cb.bind(null, null, ret))
  }
  // either a fresh object, or a clone of the passed in obj
  if (!cli) {
    cli = {}
  } else {
    cli = Object.keys(cli).reduce(function (c, k) {
      c[k] = cli[k]
      return c
    }, {})
  }
  // Queue this caller; only the first caller actually drives the load.
  loadCbs.push(cb)
  if (loading) return
  loading = true
  // Fan the single result (or error) out to every queued callback.
  cb = once(function (er, conf) {
    if (!er) {
      exports.loaded = conf
      loading = false
    }
    loadCbs.forEach(function (fn) {
      fn(er, conf)
    })
    loadCbs.length = 0
  })
  // check for a builtin if provided.
  exports.usingBuiltin = !!builtin
  var rc = exports.rootConf = new Conf()
  if (builtin) {
    rc.addFile(builtin, 'builtin')
  } else {
    rc.add({}, 'builtin')
  }
  rc.on('load', function () {
    load_(builtin, rc, cli, cb)
  })
  rc.on('error', cb)
}
// Second phase of load(): assemble the cli, env, project, user, global
// and builtin sources into a single Conf chain, then run the extra setup
// steps (setUser, loadUid, prefix mkdirp, validation, CA file) before
// handing the finished Conf to cb.
function load_ (builtin, rc, cli, cb) {
  var defaults = configDefs.defaults
  var conf = new Conf(rc)
  conf.usingBuiltin = !!builtin
  conf.add(cli, 'cli')
  conf.addEnv()
  conf.loadPrefix(function (er) {
    if (er) return cb(er)
    // If you're doing `npm --userconfig=~/foo.npmrc` then you'd expect
    // that ~/.npmrc won't override the stuff in ~/foo.npmrc (or, indeed
    // be used at all).
    //
    // However, if the cwd is ~, then ~/.npmrc is the home for the project
    // config, and will override the userconfig.
    //
    // If you're not setting the userconfig explicitly, then it will be loaded
    // twice, which is harmless but excessive. If you *are* setting the
    // userconfig explicitly then it will override your explicit intent, and
    // that IS harmful and unexpected.
    //
    // Solution: Do not load project config file that is the same as either
    // the default or resolved userconfig value. npm will log a "verbose"
    // message about this when it happens, but it is a rare enough edge case
    // that we don't have to be super concerned about it.
    var projectConf = path.resolve(conf.localPrefix, '.npmrc')
    var defaultUserConfig = rc.get('userconfig')
    var resolvedUserConfig = conf.get('userconfig')
    if (!conf.get('global') &&
        projectConf !== defaultUserConfig &&
        projectConf !== resolvedUserConfig) {
      conf.addFile(projectConf, 'project')
      conf.once('load', afterPrefix)
    } else {
      conf.add({}, 'project')
      afterPrefix()
    }
  })
  function afterPrefix () {
    conf.addFile(conf.get('userconfig'), 'user')
    conf.once('error', cb)
    conf.once('load', afterUser)
  }
  function afterUser () {
    // globalconfig and globalignorefile defaults
    // need to respond to the 'prefix' setting up to this point.
    // Eg, `npm config get globalconfig --prefix ~/local` should
    // return `~/local/etc/npmrc`
    // annoying humans and their expectations!
    if (conf.get('prefix')) {
      var etc = path.resolve(conf.get('prefix'), 'etc')
      defaults.globalconfig = path.resolve(etc, 'npmrc')
      defaults.globalignorefile = path.resolve(etc, 'npmignore')
    }
    conf.addFile(conf.get('globalconfig'), 'global')
    // move the builtin into the conf stack now.
    conf.root = defaults
    conf.add(rc.shift(), 'builtin')
    conf.once('load', function () {
      conf.loadExtras(afterExtras)
    })
  }
  function afterExtras (er) {
    if (er) return cb(er)
    // warn about invalid bits.
    validate(conf)
    var cafile = conf.get('cafile')
    if (cafile) {
      return conf.loadCAFile(cafile, finalize)
    }
    finalize()
  }
  function finalize (er) {
    if (er) {
      return cb(er)
    }
    exports.loaded = conf
    cb(er, conf)
  }
}
// A config-chain ConfigChain specialized for npm:
//   1. files are always parsed as ini
//   2. ${ENV} references in field values are expanded
//   3. field values starting with ~/ resolve against process.env.HOME
//   4. may inherit from another Conf object, using it as the base layer
inherits(Conf, CC)
function Conf (base) {
  if (!(this instanceof Conf)) return new Conf(base)
  CC.call(this)
  if (!base) {
    this.root = configDefs.defaults
  } else if (base instanceof Conf) {
    this.root = base.list[0] || base.root
  } else {
    this.root = base
  }
}
// Prototype methods that live in their own sibling modules.
Conf.prototype.loadPrefix = require('./load-prefix.js')
Conf.prototype.loadCAFile = require('./load-cafile.js')
Conf.prototype.loadUid = require('./load-uid.js')
Conf.prototype.setUser = require('./set-user.js')
Conf.prototype.getCredentialsByURI = require('./get-credentials-by-uri.js')
Conf.prototype.setCredentialsByURI = require('./set-credentials-by-uri.js')
Conf.prototype.clearCredentialsByURI = require('./clear-credentials-by-uri.js')
// Final setup steps after the chain is assembled: resolve the user
// identity, the uid, and ensure the prefix directory exists.
Conf.prototype.loadExtras = function (cb) {
  var self = this
  self.setUser(function (er) {
    if (er) return cb(er)
    self.loadUid(function (er) {
      if (er) return cb(er)
      // Without a prefix directory, nothing else can work.
      mkdirp(self.prefix, cb)
    })
  })
}
// Persist one named source ('user', 'global', 'builtin', ...) either to
// its file on disk or into its in-memory `source` object. Multiple saves
// may be in flight at once; the 'save' event fires only when the counter
// drains to zero.
Conf.prototype.save = function (where, cb) {
  var target = this.sources[where]
  if (!target || !(target.path || target.source) || !target.data) {
    var er
    // a missing 'builtin' target is tolerated silently
    if (where !== 'builtin') er = new Error('bad save target: ' + where)
    if (cb) {
      process.nextTick(cb.bind(null, er))
      return this
    }
    return this.emit('error', er)
  }
  if (target.source) {
    // In-memory source: copy the data across, no file I/O needed.
    var pref = target.prefix || ''
    Object.keys(target.data).forEach(function (k) {
      target.source[pref + k] = target.data[k]
    })
    if (cb) process.nextTick(cb)
    return this
  }
  var data = ini.stringify(target.data)
  var then = function then (er) {
    if (er) return done(er)
    fs.chmod(target.path, mode, done)
  }
  var done = function done (er) {
    if (er) {
      if (cb) return cb(er)
      else return this.emit('error', er)
    }
    this._saving--
    if (this._saving === 0) {
      if (cb) cb()
      this.emit('save')
    }
  }
  then = then.bind(this)
  done = done.bind(this)
  this._saving++
  // The user config can hold credentials (see clear-credentials-by-uri),
  // so it is written owner-only; everything else is world-readable.
  var mode = where === 'user' ? '0600' : '0666'
  if (!data.trim()) {
    // Nothing to store: remove the file rather than writing an empty one.
    fs.unlink(target.path, function () {
      // ignore the possible error (e.g. the file doesn't exist)
      done(null)
    })
  } else {
    mkdirp(path.dirname(target.path), function (er) {
      if (er) return then(er)
      fs.writeFile(target.path, data, 'utf8', function (er) {
        if (er) return then(er)
        if (where === 'user' && myUid && myGid) {
          // restore ownership to the invoking (pre-sudo) user
          fs.chown(target.path, +myUid, +myGid, then)
        } else {
          then()
        }
      })
    })
  }
  return this
}
// Register a config file as a named source and read it asynchronously.
// The marker object holds the file's position in the chain while the read
// is in flight; a missing file simply contributes an empty layer.
Conf.prototype.addFile = function (file, name) {
  name = name || file
  var marker = { __source__: name }
  this.sources[name] = { path: file, type: 'ini' }
  this.push(marker)
  this._await()
  fs.readFile(file, 'utf8', function (er, data) {
    // just ignore missing files.
    if (er) return this.add({}, marker)
    this.addString(data, file, 'ini', marker)
  }.bind(this))
  return this
}
// Config files are always parsed as ini, regardless of extension.
Conf.prototype.parse = function (content, file) {
  var parsed = CC.prototype.parse.call(this, content, file, 'ini')
  return parsed
}
// Normalize a data layer before handing it to ConfigChain: expand ${ENV}
// references in key names and coerce each value to its configured type.
// Failures surface through the 'error' event rather than being thrown.
Conf.prototype.add = function (data, marker) {
  try {
    for (const key of Object.keys(data)) {
      const expandedKey = envReplace(key)
      const parsedValue = parseField(data[key], expandedKey)
      delete data[key]
      data[expandedKey] = parsedValue
    }
  } catch (e) {
    this.emit('error', e)
    return this
  }
  return CC.prototype.add.call(this, data, marker)
}
// Collect npm_config_* environment variables into an 'env' layer, with
// the prefix stripped and underscores (after the first character)
// rewritten as dashes.
Conf.prototype.addEnv = function (env) {
  env = env || process.env
  var conf = {}
  Object.keys(env).forEach(function (k) {
    if (!/^npm_config_/i.test(k)) return
    if (!env[k]) return
    // leave first char untouched, even if
    // it is a '_' - convert all other to '-'
    var name = k.toLowerCase()
      .replace(/^npm_config_/, '')
      .replace(/(?!^)_/g, '-')
    conf[name] = env[k]
  })
  return CC.prototype.addEnv.call(this, '', conf, 'env')
}
// Coerce a raw string value for config key `k` into its configured type:
// JSON-unquote, recognize the literal words true/false/null/undefined,
// expand ${ENV} references, resolve paths (with ~/ expansion), parse
// umasks, and numify numbers. Non-string inputs pass through untouched.
function parseField (f, k) {
  if (typeof f !== 'string' && !(f instanceof String)) return f
  // type can be an array or single thing.
  var typeList = [].concat(types[k])
  var isPath = typeList.indexOf(path) !== -1
  var isBool = typeList.indexOf(Boolean) !== -1
  var isString = typeList.indexOf(String) !== -1
  var isUmask = typeList.indexOf(Umask) !== -1
  var isNumber = typeList.indexOf(Number) !== -1
  f = ('' + f).trim()
  // A fully double-quoted value is treated as a JSON string literal.
  if (f.match(/^".*"$/)) {
    try {
      f = JSON.parse(f)
    } catch (e) {
      throw new Error('Failed parsing JSON config key ' + k + ': ' + f)
    }
  }
  // A bare boolean flag with no value means true.
  if (isBool && !isString && f === '') return true
  switch (f) {
    case 'true': return true
    case 'false': return false
    case 'null': return null
    case 'undefined': return undefined
  }
  f = envReplace(f)
  if (isPath) {
    // expand a leading ~/ (also ~\ on Windows) against HOME
    var homePattern = isWindows ? /^~(\/|\\)/ : /^~\//
    if (f.match(homePattern) && process.env.HOME) {
      f = path.resolve(process.env.HOME, f.substr(2))
    }
    f = path.resolve(f)
  }
  if (isUmask) f = umask.fromString(f)
  if (isNumber && !isNaN(f)) f = +f
  return f
}
// Expand ${NAME} references in `f` against process.env. A reference
// preceded by an odd number of backslashes is considered escaped and is
// left untouched; referencing an unset variable is a hard error.
// Non-string (or empty) inputs pass through unchanged.
function envReplace (f) {
  if (typeof f !== 'string' || !f) return f
  return f.replace(/(\\*)\$\{([^}]+)\}/g, function (whole, slashes, name) {
    var escaped = slashes.length % 2 === 1
    if (escaped) return whole
    if (process.env[name] === undefined) {
      throw new Error('Failed to replace env in config: ' + whole)
    }
    return process.env[name]
  })
}
// Run nopt's cleanup over every layer of the chain (and the root
// defaults), warning about and stripping invalid values.
function validate (cl) {
  cl.list.forEach(function (layer) {
    nopt.clean(layer, configDefs.types)
  })
  nopt.clean(cl.root, configDefs.types)
}

433
website/node_modules/npm/lib/config/defaults.js generated vendored Normal file
View File

@@ -0,0 +1,433 @@
// defaults, types, and shorthands.
var path = require('path')
var url = require('url')
var Stream = require('stream').Stream
var semver = require('semver')
var stableFamily = semver.parse(process.version)
var nopt = require('nopt')
var os = require('os')
var osenv = require('osenv')
var umask = require('../utils/umask')
var hasUnicode = require('has-unicode')
// npmlog may be unavailable (e.g. while dependencies are still being set
// up); fall back to a minimal console-based warn shim with the same
// (message, ...args) shape.
var log
try {
  log = require('npmlog')
} catch (er) {
  var util = require('util')
  log = { warn: function (m) {
    console.warn(m + ' ' + util.format.apply(util, [].slice.call(arguments, 1)))
  } }
}
exports.Umask = Umask
// Marker type registered with nopt (see nopt.typeDefs below) to identify
// umask-valued configs.
function Umask () {}
// nopt validator for Umask-typed values: delegates entirely to the shared
// umask helper.
function validateUmask (data, k, val) {
  var result = umask.validate(data, k, val)
  return result
}
// nopt validator for semver-typed values: reject anything that is not a
// valid version, and normalize valid ones to their canonical form.
function validateSemver (data, k, val) {
  var clean = semver.valid(val)
  if (!clean) return false
  data[k] = clean
}
// nopt validator for Stream-typed values: only instances of the core
// Stream class are accepted.
function validateStream (data, k, val) {
  var isStream = val instanceof Stream
  if (!isStream) return false
  data[k] = val
}
// Register npm's custom value types with nopt.
nopt.typeDefs.semver = { type: semver, validate: validateSemver }
nopt.typeDefs.Stream = { type: Stream, validate: validateStream }
nopt.typeDefs.Umask = { type: Umask, validate: validateUmask }
// Called by nopt whenever a config value fails validation: warn with the
// offending key/value and, where the type is known, what was expected.
nopt.invalidHandler = function (k, val, type) {
  log.warn('invalid config', k + '=' + JSON.stringify(val))
  // for multi-type configs, pick the most descriptive member to report on
  if (Array.isArray(type)) {
    if (type.indexOf(url) !== -1) type = url
    else if (type.indexOf(path) !== -1) type = path
  }
  switch (type) {
    case Umask:
      log.warn('invalid config', 'Must be umask, octal number in range 0000..0777')
      break
    case url:
      log.warn('invalid config', "Must be a full url with 'http://'")
      break
    case path:
      log.warn('invalid config', 'Must be a valid filesystem path')
      break
    case Number:
      log.warn('invalid config', 'Must be a numeric value')
      break
    case Stream:
      log.warn('invalid config', 'Must be an instance of the Stream class')
      break
  }
}
// Keep only "major.minor" for even-minor Node versions (per the parity
// check, odd minors are discarded as non-stable); otherwise null.
if (!stableFamily || (+stableFamily.minor % 2)) stableFamily = null
else stableFamily = stableFamily.major + '.' + stableFamily.minor
// lazily populated by the exports.defaults getter below
var defaults
var temp = osenv.tmpdir()
var home = osenv.home()
var uidOrPid = process.getuid ? process.getuid() : process.pid
// Normalize HOME for downstream ~-expansion; with no detectable home,
// fall back to a per-user (or per-process) temp directory.
if (home) process.env.HOME = home
else home = path.resolve(temp, 'npm-' + uidOrPid)
// Cache lives in %APPDATA%\npm-cache on Windows, ~/.npm elsewhere.
var cacheExtra = process.platform === 'win32' ? 'npm-cache' : '.npm'
var cacheRoot = (process.platform === 'win32' && process.env.APPDATA) || home
var cache = path.resolve(cacheRoot, cacheExtra)
var globalPrefix
// The default value for every config key, computed lazily on first access
// (so PREFIX/DESTDIR set before that point are respected) and cached in
// `defaults` thereafter.
Object.defineProperty(exports, 'defaults', {get: function () {
  if (defaults) return defaults
  if (process.env.PREFIX) {
    globalPrefix = process.env.PREFIX
  } else if (process.platform === 'win32') {
    // c:\node\node.exe --> prefix=c:\node\
    globalPrefix = path.dirname(process.execPath)
  } else {
    // /usr/local/bin/node --> prefix=/usr/local
    globalPrefix = path.dirname(path.dirname(process.execPath))
    // destdir only is respected on Unix
    if (process.env.DESTDIR) {
      globalPrefix = path.join(process.env.DESTDIR, globalPrefix)
    }
  }
  defaults = {
    access: null,
    'allow-same-version': false,
    'always-auth': false,
    also: null,
    audit: true,
    'audit-level': 'low',
    'auth-type': 'legacy',
    'bin-links': true,
    browser: null,
    ca: null,
    cafile: null,
    cache: cache,
    'cache-lock-stale': 60000,
    'cache-lock-retries': 10,
    'cache-lock-wait': 10000,
    'cache-max': Infinity,
    'cache-min': 10,
    cert: null,
    cidr: null,
    color: process.env.NO_COLOR == null,
    depth: Infinity,
    description: true,
    dev: false,
    'dry-run': false,
    editor: osenv.editor(),
    'engine-strict': false,
    force: false,
    'fetch-retries': 2,
    'fetch-retry-factor': 10,
    'fetch-retry-mintimeout': 10000,
    'fetch-retry-maxtimeout': 60000,
    git: 'git',
    'git-tag-version': true,
    'commit-hooks': true,
    global: false,
    globalconfig: path.resolve(globalPrefix, 'etc', 'npmrc'),
    'global-style': false,
    group: process.platform === 'win32' ? 0
      : process.env.SUDO_GID || (process.getgid && process.getgid()),
    'ham-it-up': false,
    heading: 'npm',
    'if-present': false,
    'ignore-prepublish': false,
    'ignore-scripts': false,
    'init-module': path.resolve(home, '.npm-init.js'),
    'init-author-name': '',
    'init-author-email': '',
    'init-author-url': '',
    'init-version': '1.0.0',
    'init-license': 'ISC',
    json: false,
    key: null,
    'legacy-bundling': false,
    link: false,
    'local-address': undefined,
    loglevel: 'notice',
    logstream: process.stderr,
    'logs-max': 10,
    long: false,
    maxsockets: 50,
    message: '%s',
    'metrics-registry': null,
    'node-options': null,
    'node-version': process.version,
    'offline': false,
    'onload-script': false,
    only: null,
    optional: true,
    otp: null,
    'package-lock': true,
    'package-lock-only': false,
    parseable: false,
    'prefer-offline': false,
    'prefer-online': false,
    prefix: globalPrefix,
    preid: '',
    production: process.env.NODE_ENV === 'production',
    'progress': !process.env.TRAVIS && !process.env.CI,
    proxy: null,
    'https-proxy': null,
    'noproxy': null,
    'user-agent': 'npm/{npm-version} ' +
      'node/{node-version} ' +
      '{platform} ' +
      '{arch}',
    'read-only': false,
    'rebuild-bundle': true,
    registry: 'https://registry.npmjs.org/',
    rollback: true,
    save: true,
    'save-bundle': false,
    'save-dev': false,
    'save-exact': false,
    'save-optional': false,
    'save-prefix': '^',
    'save-prod': false,
    scope: '',
    'script-shell': null,
    'scripts-prepend-node-path': 'warn-only',
    searchopts: '',
    searchexclude: null,
    searchlimit: 20,
    searchstaleness: 15 * 60,
    'send-metrics': false,
    shell: osenv.shell(),
    shrinkwrap: true,
    'sign-git-commit': false,
    'sign-git-tag': false,
    'sso-poll-frequency': 500,
    'sso-type': 'oauth',
    'strict-ssl': true,
    tag: 'latest',
    'tag-version-prefix': 'v',
    timing: false,
    tmp: temp,
    unicode: hasUnicode(),
    'unsafe-perm': process.platform === 'win32' ||
      process.platform === 'cygwin' ||
      !(process.getuid && process.setuid &&
        process.getgid && process.setgid) ||
      process.getuid() !== 0,
    'update-notifier': true,
    usage: false,
    user: process.platform === 'win32' ? 0 : 'nobody',
    userconfig: path.resolve(home, '.npmrc'),
    umask: process.umask ? process.umask() : umask.fromString('022'),
    version: false,
    versions: false,
    viewer: process.platform === 'win32' ? 'browser' : 'man',
    _exit: true
  }
  return defaults
}})
// nopt type map: the set of legal value types for every config key. An
// array means any of the listed members is acceptable.
exports.types = {
  access: [null, 'restricted', 'public'],
  'allow-same-version': Boolean,
  'always-auth': Boolean,
  also: [null, 'dev', 'development'],
  audit: Boolean,
  'audit-level': ['low', 'moderate', 'high', 'critical'],
  'auth-type': ['legacy', 'sso', 'saml', 'oauth'],
  'bin-links': Boolean,
  browser: [null, String],
  ca: [null, String, Array],
  cafile: path,
  cache: path,
  'cache-lock-stale': Number,
  'cache-lock-retries': Number,
  'cache-lock-wait': Number,
  'cache-max': Number,
  'cache-min': Number,
  cert: [null, String],
  cidr: [null, String, Array],
  color: ['always', Boolean],
  depth: Number,
  description: Boolean,
  dev: Boolean,
  'dry-run': Boolean,
  editor: String,
  'engine-strict': Boolean,
  force: Boolean,
  'fetch-retries': Number,
  'fetch-retry-factor': Number,
  'fetch-retry-mintimeout': Number,
  'fetch-retry-maxtimeout': Number,
  git: String,
  'git-tag-version': Boolean,
  'commit-hooks': Boolean,
  global: Boolean,
  globalconfig: path,
  'global-style': Boolean,
  group: [Number, String],
  'https-proxy': [null, url],
  'user-agent': String,
  'ham-it-up': Boolean,
  'heading': String,
  'if-present': Boolean,
  'ignore-prepublish': Boolean,
  'ignore-scripts': Boolean,
  'init-module': path,
  'init-author-name': String,
  'init-author-email': String,
  'init-author-url': ['', url],
  'init-license': String,
  'init-version': semver,
  json: Boolean,
  key: [null, String],
  'legacy-bundling': Boolean,
  link: Boolean,
  'local-address': getLocalAddresses(),
  loglevel: ['silent', 'error', 'warn', 'notice', 'http', 'timing', 'info', 'verbose', 'silly'],
  logstream: Stream,
  'logs-max': Number,
  long: Boolean,
  maxsockets: Number,
  message: String,
  'metrics-registry': [null, String],
  'node-options': [null, String],
  'node-version': [null, semver],
  'noproxy': [null, String, Array],
  offline: Boolean,
  'onload-script': [null, String],
  only: [null, 'dev', 'development', 'prod', 'production'],
  optional: Boolean,
  'package-lock': Boolean,
  otp: [null, String],
  'package-lock-only': Boolean,
  parseable: Boolean,
  'prefer-offline': Boolean,
  'prefer-online': Boolean,
  prefix: path,
  preid: String,
  production: Boolean,
  progress: Boolean,
  proxy: [null, false, url], // allow proxy to be disabled explicitly
  'read-only': Boolean,
  'rebuild-bundle': Boolean,
  registry: [null, url],
  rollback: Boolean,
  save: Boolean,
  'save-bundle': Boolean,
  'save-dev': Boolean,
  'save-exact': Boolean,
  'save-optional': Boolean,
  'save-prefix': String,
  'save-prod': Boolean,
  scope: String,
  'script-shell': [null, String],
  'scripts-prepend-node-path': [false, true, 'auto', 'warn-only'],
  searchopts: String,
  searchexclude: [null, String],
  searchlimit: Number,
  searchstaleness: Number,
  'send-metrics': Boolean,
  shell: String,
  shrinkwrap: Boolean,
  'sign-git-commit': Boolean,
  'sign-git-tag': Boolean,
  'sso-poll-frequency': Number,
  'sso-type': [null, 'oauth', 'saml'],
  'strict-ssl': Boolean,
  tag: String,
  timing: Boolean,
  tmp: path,
  unicode: Boolean,
  'unsafe-perm': Boolean,
  'update-notifier': Boolean,
  usage: Boolean,
  user: [Number, String],
  userconfig: path,
  umask: Umask,
  version: Boolean,
  'tag-version-prefix': String,
  versions: Boolean,
  viewer: String,
  _exit: Boolean
}
// Collect every address bound to a local network interface, plus a trailing
// `undefined` (a valid config value meaning "no local address pinned").
function getLocalAddresses () {
  var nics
  // #8094: some environments require elevated permissions to enumerate
  // interfaces, and synchronously throw EPERM when run without
  // elevated privileges
  try {
    nics = os.networkInterfaces()
  } catch (e) {
    nics = {}
  }
  var addresses = []
  Object.keys(nics).forEach(function (name) {
    nics[name].forEach(function (entry) {
      addresses.push(entry.address)
    })
  })
  addresses.push(undefined)
  return addresses
}
exports.shorthands = {
s: ['--loglevel', 'silent'],
d: ['--loglevel', 'info'],
dd: ['--loglevel', 'verbose'],
ddd: ['--loglevel', 'silly'],
noreg: ['--no-registry'],
N: ['--no-registry'],
reg: ['--registry'],
'no-reg': ['--no-registry'],
silent: ['--loglevel', 'silent'],
verbose: ['--loglevel', 'verbose'],
quiet: ['--loglevel', 'warn'],
q: ['--loglevel', 'warn'],
h: ['--usage'],
H: ['--usage'],
'?': ['--usage'],
help: ['--usage'],
v: ['--version'],
f: ['--force'],
desc: ['--description'],
'no-desc': ['--no-description'],
'local': ['--no-global'],
l: ['--long'],
m: ['--message'],
p: ['--parseable'],
porcelain: ['--parseable'],
readonly: ['--read-only'],
g: ['--global'],
S: ['--save'],
D: ['--save-dev'],
E: ['--save-exact'],
O: ['--save-optional'],
P: ['--save-prod'],
y: ['--yes'],
n: ['--no-yes'],
B: ['--save-bundle'],
C: ['--prefix']
}

77
website/node_modules/npm/lib/config/fetch-opts.js generated vendored Normal file
View File

@@ -0,0 +1,77 @@
'use strict'
const url = require('url')
module.exports.fromPacote = fromPacote
// Translate pacote-style options into the option shape the fetch layer
// (make-fetch-happen style) expects.
function fromPacote (opts) {
  const {
    cache,
    ca,
    cert,
    registry,
    key,
    localAddress,
    maxSockets,
    proxy,
    refer,
    retry,
    strictSSL,
    timeout,
    uid,
    gid
  } = opts
  return {
    cache: getCacheMode(opts),
    cacheManager: cache,
    ca,
    cert,
    headers: getHeaders('', registry, opts),
    key,
    localAddress,
    maxSockets,
    proxy,
    referer: refer,
    retry,
    strictSSL: !!strictSSL,
    timeout,
    uid,
    gid
  }
}
// Map the offline-preference flags onto the fetch cache modes, in
// decreasing order of strictness.
function getCacheMode (opts) {
  if (opts.offline) return 'only-if-cached'
  if (opts.preferOffline) return 'force-cache'
  if (opts.preferOnline) return 'no-cache'
  return 'default'
}
// Build the request headers for `uri`, attaching credentials only when it
// is safe to do so.
function getHeaders (uri, registry, opts) {
  // Base metadata headers; caller-supplied opts.headers win on conflict.
  const headers = Object.assign({
    'npm-in-ci': opts.isFromCI,
    'npm-scope': opts.projectScope,
    'npm-session': opts.npmSession,
    'user-agent': opts.userAgent,
    'referer': opts.refer
  }, opts.headers)

  // check for auth settings specific to this registry
  const scopedAuth = opts.auth && opts.auth[registryKey(registry)]
  const auth = scopedAuth || opts.auth

  // If a tarball is hosted on a different place than the manifest, only send
  // credentials on `alwaysAuth`
  const shouldAuth = auth && (
    auth.alwaysAuth ||
    url.parse(uri).host === url.parse(registry).host
  )

  if (shouldAuth) {
    if (auth.token) {
      headers.authorization = `Bearer ${auth.token}`
    } else if (auth.username && auth.password) {
      const encoded = Buffer.from(
        `${auth.username}:${auth.password}`, 'utf8'
      ).toString('base64')
      headers.authorization = `Basic ${encoded}`
    } else if (auth._auth) {
      headers.authorization = `Basic ${auth._auth}`
    }
  }
  return headers
}
// Normalize a registry URL into its protocol-agnostic lookup key
// (host + directory path, no scheme/auth/query).
function registryKey (registry) {
  const { host, pathname, slashes } = url.parse(registry)
  const formatted = url.format({ host, pathname, slashes })
  // resolving against '.' trims any trailing non-directory component
  return url.resolve(formatted, '.')
}

32
website/node_modules/npm/lib/config/gentle-fs.js generated vendored Normal file
View File

@@ -0,0 +1,32 @@
'use strict'
const npm = require('../npm.js')
const log = require('npmlog')
module.exports = gentleFSOpts
// Build the options object handed to gentle-fs for guarded file removal.
function gentleFSOpts (gently, base, abs) {
  // never rm the root, prefix, or bin dirs
  //
  // globals included because of `npm link` -- as far as the package
  // requesting the link is concerned, the linked package is always
  // installed globally
  const protectedPrefixes = [
    npm.prefix,
    npm.globalPrefix,
    npm.dir,
    npm.root,
    npm.globalDir,
    npm.bin,
    npm.globalBin
  ]
  return {
    prefixes: protectedPrefixes,
    absolute: abs,
    log: log,
    prefix: npm.prefix,
    force: npm.config.get('force'),
    gently: gently,
    base: base,
    name: 'npm'
  }
}

View File

@@ -0,0 +1,78 @@
var assert = require('assert')
var toNerfDart = require('./nerf-dart.js')
module.exports = getCredentialsByURI
// Resolve the credentials that apply to `uri` from the npm configuration.
// Must be called with `this` bound to a config object (uses this.get).
// Registry-scoped keys ("//host/:username") take precedence over the legacy
// top-level ones, which only apply to the default registry.
function getCredentialsByURI (uri) {
  assert(uri && typeof uri === 'string', 'registry URL is required')
  var nerfed = toNerfDart(uri)
  var defnerf = toNerfDart(this.get('registry'))

  // hidden class micro-optimization
  var c = {
    scope: nerfed,
    token: undefined,
    password: undefined,
    username: undefined,
    email: undefined,
    auth: undefined,
    alwaysAuth: undefined
  }

  // used to override scope matching for tokens as well as legacy auth
  if (this.get(nerfed + ':always-auth') !== undefined) {
    var val = this.get(nerfed + ':always-auth')
    // config values can arrive as the string 'false'
    c.alwaysAuth = val === 'false' ? false : !!val
  } else if (this.get('always-auth') !== undefined) {
    c.alwaysAuth = this.get('always-auth')
  }

  if (this.get(nerfed + ':_authToken')) {
    c.token = this.get(nerfed + ':_authToken')
    // the bearer token is enough, don't confuse things
    return c
  }

  // NOTE(review): lower-cased/dashed variant of the token key -- presumably
  // produced by environment-variable-sourced config; confirm before removing.
  if (this.get(nerfed + ':-authtoken')) {
    c.token = this.get(nerfed + ':-authtoken')
    // the bearer token is enough, don't confuse things
    return c
  }

  // Handle the old-style _auth=<base64> style for the default
  // registry, if set.
  var authDef = this.get('_auth')
  var userDef = this.get('username')
  var passDef = this.get('_password')
  if (authDef && !(userDef && passDef)) {
    // _auth is base64("user:pass"); split it back apart
    authDef = Buffer.from(authDef, 'base64').toString()
    authDef = authDef.split(':')
    userDef = authDef.shift()
    passDef = authDef.join(':')
  }

  if (this.get(nerfed + ':_password')) {
    c.password = Buffer.from(this.get(nerfed + ':_password'), 'base64').toString('utf8')
  } else if (nerfed === defnerf && passDef) {
    c.password = passDef
  }

  if (this.get(nerfed + ':username')) {
    c.username = this.get(nerfed + ':username')
  } else if (nerfed === defnerf && userDef) {
    c.username = userDef
  }

  if (this.get(nerfed + ':email')) {
    c.email = this.get(nerfed + ':email')
  } else if (this.get('email')) {
    c.email = this.get('email')
  }

  // precompute the basic-auth header value when both halves are present
  if (c.username && c.password) {
    c.auth = Buffer.from(c.username + ':' + c.password).toString('base64')
  }

  return c
}

31
website/node_modules/npm/lib/config/lifecycle.js generated vendored Normal file
View File

@@ -0,0 +1,31 @@
'use strict'
const npm = require('../npm.js')
const log = require('npmlog')
module.exports = lifecycleOpts
// Memoized base options shared by every lifecycle-script invocation.
let cachedOpts

// Build (once) the options object passed to npm-lifecycle, merging in any
// per-call overrides without mutating the cached copy.
function lifecycleOpts (moreOpts) {
  if (!cachedOpts) cachedOpts = buildBaseOpts()
  if (!moreOpts) return cachedOpts
  return Object.assign({}, cachedOpts, moreOpts)
}

// Snapshot the relevant npm config values for lifecycle runs.
function buildBaseOpts () {
  return {
    config: npm.config.snapshot,
    dir: npm.dir,
    failOk: false,
    force: npm.config.get('force'),
    group: npm.config.get('group'),
    ignorePrepublish: npm.config.get('ignore-prepublish'),
    ignoreScripts: npm.config.get('ignore-scripts'),
    log: log,
    nodeOptions: npm.config.get('node-options'),
    production: npm.config.get('production'),
    scriptShell: npm.config.get('script-shell'),
    scriptsPrependNodePath: npm.config.get('scripts-prepend-node-path'),
    unsafePerm: npm.config.get('unsafe-perm'),
    user: npm.config.get('user')
  }
}

32
website/node_modules/npm/lib/config/load-cafile.js generated vendored Normal file
View File

@@ -0,0 +1,32 @@
module.exports = loadCAFile
var fs = require('fs')
// Read a PEM bundle from `cafilePath`, split it into individual
// certificates, and store them as the `ca` config value.  `this` is the
// npm config object (provides .set).
function loadCAFile (cafilePath, cb) {
  // no cafile configured: succeed asynchronously without touching the fs
  if (!cafilePath) return process.nextTick(cb)

  fs.readFile(cafilePath, 'utf8', afterCARead.bind(this))

  function afterCARead (er, cadata) {
    if (er) {
      // previous cafile no longer exists, so just continue on gracefully
      if (er.code === 'ENOENT') return cb()
      return cb(er)
    }
    var delim = '-----END CERTIFICATE-----'
    var certs = cadata
      .split(delim)
      .filter(function (section) {
        return section.trim() !== ''
      })
      .map(function (section) {
        return section.trimLeft() + delim
      })
    this.set('ca', certs)
    cb(null)
  }
}

51
website/node_modules/npm/lib/config/load-prefix.js generated vendored Normal file
View File

@@ -0,0 +1,51 @@
module.exports = loadPrefix
var findPrefix = require('find-npm-prefix')
var path = require('path')
// Wire up the `prefix`, `globalPrefix` and `localPrefix` properties on a
// config object (`this`) and determine the local prefix for this run.
// `prefix` dispatches to global or local depending on the --global flag.
function loadPrefix (cb) {
  // cli is the highest-priority config layer (command-line options)
  var cli = this.list[0]

  Object.defineProperty(this, 'prefix',
    {
      set: function (prefix) {
        var g = this.get('global')
        this[g ? 'globalPrefix' : 'localPrefix'] = prefix
      }.bind(this),
      get: function () {
        var g = this.get('global')
        return g ? this.globalPrefix : this.localPrefix
      }.bind(this),
      enumerable: true
    })

  // globalPrefix is backed by the `prefix` config key
  Object.defineProperty(this, 'globalPrefix',
    {
      set: function (prefix) {
        this.set('prefix', prefix)
      }.bind(this),
      get: function () {
        return path.resolve(this.get('prefix'))
      }.bind(this),
      enumerable: true
    })

  // localPrefix is a closed-over variable, not a config key
  var p
  Object.defineProperty(this, 'localPrefix',
    { set: function (prefix) { p = prefix },
      get: function () { return p },
      enumerable: true })

  // try to guess at a good node_modules location.
  // If we are *explicitly* given a prefix on the cli, then
  // always use that.  otherwise, infer local prefix from cwd.
  if (Object.prototype.hasOwnProperty.call(cli, 'prefix')) {
    p = path.resolve(cli.prefix)
    process.nextTick(cb)
  } else {
    findPrefix(process.cwd()).then((found) => {
      p = found
      cb()
    }, cb)
  }
}

15
website/node_modules/npm/lib/config/load-uid.js generated vendored Normal file
View File

@@ -0,0 +1,15 @@
module.exports = loadUid
var getUid = require('uid-number')
// Call in the context of a npmconf object
// Determine which user/group npm should run privileged work as.
// `this` is the npm config object (provides .get).
function loadUid (cb) {
  // In unsafe-perm mode everything runs as the current user, so there is
  // nothing to look up.  Otherwise resolve the configured user/group to
  // numeric ids first, to support `npm update npm -g`.
  if (this.get('unsafe-perm')) {
    process.nextTick(cb)
  } else {
    getUid(this.get('user'), this.get('group'), cb)
  }
}

23
website/node_modules/npm/lib/config/nerf-dart.js generated vendored Normal file
View File

@@ -0,0 +1,23 @@
var url = require('url')
module.exports = toNerfDart
/**
 * Maps a URL to an identifier.
 *
 * Name courtesy schiffertronix media LLC, a New Jersey corporation
 *
 * @param {String} uri The URL to be nerfed.
 *
 * @returns {String} A nerfed URL.
 */
function toNerfDart (uri) {
  var parsed = url.parse(uri)
  // Keep only the location parts (host + path); protocol, credentials and
  // query data must not influence credential matching.
  var stripped = {
    slashes: parsed.slashes,
    host: parsed.host,
    pathname: parsed.pathname
  }
  // resolving against '.' drops any trailing non-directory component
  return url.resolve(url.format(stripped), '.')
}

141
website/node_modules/npm/lib/config/pacote.js generated vendored Normal file
View File

@@ -0,0 +1,141 @@
'use strict'
const Buffer = require('safe-buffer').Buffer
const crypto = require('crypto')
const npm = require('../npm')
const log = require('npmlog')
let pack // lazy-required in pacoteOpts() to avoid a require cycle with pack.js
const path = require('path')
let effectiveOwner // memoized result of calculateOwner()

// one random session id per CLI invocation, sent along with registry requests
const npmSession = crypto.randomBytes(8).toString('hex')
log.verbose('npm-session', npmSession)

module.exports = pacoteOpts
// Assemble the full options object handed to pacote, derived from npm's
// configuration, plus any caller-supplied overrides in `moreOpts`.
function pacoteOpts (moreOpts) {
  // lazy-require to break the circular dependency with pack.js
  if (!pack) {
    pack = require('../pack.js')
  }
  const ownerStats = calculateOwner()
  const opts = {
    cache: path.join(npm.config.get('cache'), '_cacache'),
    ca: npm.config.get('ca'),
    cert: npm.config.get('cert'),
    defaultTag: npm.config.get('tag'),
    dirPacker: pack.packGitDep,
    hashAlgorithm: 'sha1',
    includeDeprecated: false,
    key: npm.config.get('key'),
    localAddress: npm.config.get('local-address'),
    log: log,
    maxAge: npm.config.get('cache-min'),
    maxSockets: npm.config.get('maxsockets'),
    npmSession: npmSession,
    offline: npm.config.get('offline'),
    // legacy cache-min/cache-max settings map onto the newer offline prefs
    preferOffline: npm.config.get('prefer-offline') || npm.config.get('cache-min') > 9999,
    preferOnline: npm.config.get('prefer-online') || npm.config.get('cache-max') <= 0,
    projectScope: npm.projectScope,
    proxy: npm.config.get('https-proxy') || npm.config.get('proxy'),
    noProxy: npm.config.get('noproxy'),
    refer: npm.registry.refer,
    registry: npm.config.get('registry'),
    retry: {
      retries: npm.config.get('fetch-retries'),
      factor: npm.config.get('fetch-retry-factor'),
      minTimeout: npm.config.get('fetch-retry-mintimeout'),
      maxTimeout: npm.config.get('fetch-retry-maxtimeout')
    },
    scope: npm.config.get('scope'),
    strictSSL: npm.config.get('strict-ssl'),
    userAgent: npm.config.get('user-agent'),
    dmode: npm.modes.exec,
    fmode: npm.modes.file,
    umask: npm.modes.umask
  }

  if (ownerStats.uid != null || ownerStats.gid != null) {
    Object.assign(opts, ownerStats)
  }

  // Collect credentials out of the flat config keyspace.  A key either
  // applies globally ("username") or to one registry ("//host/:username").
  npm.config.keys.forEach(function (k) {
    const authMatchGlobal = k.match(
      /^(_authToken|username|_password|password|email|always-auth|_auth)$/
    )
    const authMatchScoped = k[0] === '/' && k.match(
      /(.*):(_authToken|username|_password|password|email|always-auth|_auth)$/
    )

    // if it matches scoped it will also match global
    if (authMatchGlobal || authMatchScoped) {
      let nerfDart = null
      let key = null
      let val = null

      if (!opts.auth) { opts.auth = {} }

      if (authMatchScoped) {
        nerfDart = authMatchScoped[1]
        key = authMatchScoped[2]
        val = npm.config.get(k)
        if (!opts.auth[nerfDart]) {
          opts.auth[nerfDart] = {
            alwaysAuth: !!npm.config.get('always-auth')
          }
        }
      } else {
        key = authMatchGlobal[1]
        val = npm.config.get(k)
        opts.auth.alwaysAuth = !!npm.config.get('always-auth')
      }

      // write into either the per-registry bucket or the global one
      const auth = authMatchScoped ? opts.auth[nerfDart] : opts.auth
      if (key === '_authToken') {
        auth.token = val
      } else if (key.match(/password$/i)) {
        auth.password =
          // the config file stores password auth already-encoded. pacote expects
          // the actual username/password pair.
          Buffer.from(val, 'base64').toString('utf8')
      } else if (key === 'always-auth') {
        // config values can arrive as the string 'false'
        auth.alwaysAuth = val === 'false' ? false : !!val
      } else {
        auth[key] = val
      }
    }

    // "@scope:registry" keys tell pacote which registry to use per scope
    if (k[0] === '@') {
      if (!opts.scopeTargets) { opts.scopeTargets = {} }
      opts.scopeTargets[k.replace(/:registry$/, '')] = npm.config.get(k)
    }
  })

  // caller-supplied overrides always win
  Object.keys(moreOpts || {}).forEach((k) => {
    opts[k] = moreOpts[k]
  })

  return opts
}
// Work out (and memoize) which uid/gid owns this npm process, preferring
// the invoking user's ids when running under sudo.
function calculateOwner () {
  if (effectiveOwner) return effectiveOwner

  effectiveOwner = { uid: 0, gid: 0 }

  // Pretty much only on windows: no getuid means no ids to look up.
  if (!process.getuid) return effectiveOwner

  effectiveOwner.uid = +process.getuid()
  effectiveOwner.gid = +process.getgid()

  // running as root under sudo: use the original user's ids instead
  if (effectiveOwner.uid === 0) {
    if (process.env.SUDO_UID) effectiveOwner.uid = +process.env.SUDO_UID
    if (process.env.SUDO_GID) effectiveOwner.gid = +process.env.SUDO_GID
  }

  return effectiveOwner
}

29
website/node_modules/npm/lib/config/reg-client.js generated vendored Normal file
View File

@@ -0,0 +1,29 @@
'use strict'
module.exports = regClientConfig
// Build the configuration object for npm-registry-client from npm's config.
function regClientConfig (npm, log, config) {
  // proxy endpoints and the local interface to bind outgoing requests to
  const proxy = {
    http: config.get('proxy'),
    https: config.get('https-proxy'),
    localAddress: config.get('local-address')
  }
  // TLS material and strictness for registry connections
  const ssl = {
    certificate: config.get('cert'),
    key: config.get('key'),
    ca: config.get('ca'),
    strict: config.get('strict-ssl')
  }
  // exponential-backoff settings for failed fetches
  const retry = {
    retries: config.get('fetch-retries'),
    factor: config.get('fetch-retry-factor'),
    minTimeout: config.get('fetch-retry-mintimeout'),
    maxTimeout: config.get('fetch-retry-maxtimeout')
  }
  return {
    proxy: proxy,
    ssl: ssl,
    retry: retry,
    userAgent: config.get('user-agent'),
    log: log,
    defaultTag: config.get('tag'),
    maxSockets: config.get('maxsockets'),
    scope: npm.projectScope
  }
}

View File

@@ -0,0 +1,39 @@
var assert = require('assert')
var toNerfDart = require('./nerf-dart.js')
module.exports = setCredentialsByURI
// Persist credentials `c` for the registry at `uri` into the user config.
// `this` is the npm config object (provides .set/.del).
function setCredentialsByURI (uri, c) {
  assert(uri && typeof uri === 'string', 'registry URL is required')
  assert(c && typeof c === 'object', 'credentials are required')

  var scope = toNerfDart(uri)

  if (c.token) {
    // bearer-token auth: store the token, clear any basic-auth leftovers
    this.set(scope + ':_authToken', c.token, 'user')
    this.del(scope + ':_password', 'user')
    this.del(scope + ':username', 'user')
    this.del(scope + ':email', 'user')
    this.del(scope + ':always-auth', 'user')
    return
  }

  if (c.username || c.password || c.email) {
    // basic auth requires the full triple; fail loudly on partial input
    assert(c.username, 'must include username')
    assert(c.password, 'must include password')
    assert(c.email, 'must include email address')
    this.del(scope + ':_authToken', 'user')

    var b64password = Buffer.from(c.password, 'utf8').toString('base64')
    this.set(scope + ':_password', b64password, 'user')
    this.set(scope + ':username', c.username, 'user')
    this.set(scope + ':email', c.email, 'user')
    if (c.alwaysAuth !== undefined) {
      this.set(scope + ':always-auth', c.alwaysAuth, 'user')
    } else {
      this.del(scope + ':always-auth', 'user')
    }
    return
  }

  throw new Error('No credentials to set.')
}

29
website/node_modules/npm/lib/config/set-user.js generated vendored Normal file
View File

@@ -0,0 +1,29 @@
module.exports = setUser
var assert = require('assert')
var path = require('path')
var fs = require('fs')
var mkdirp = require('mkdirp')
// Default the `user` config to a sensible uid.  `this` is the npm config
// object; `this.root` is the defaults layer being written to.
function setUser (cb) {
  var defaultConf = this.root
  assert(defaultConf !== Object.prototype)

  // If global, leave it as-is.
  if (this.get('global')) return cb()

  // Under sudo, prefer the invoking user over root.
  if (process.env.SUDO_UID) {
    defaultConf.user = +(process.env.SUDO_UID)
    return cb()
  }

  // Otherwise default to the owner of the prefix folder, creating it
  // first so the stat cannot fail with ENOENT.  Just set the default,
  // so it can be overridden.
  var prefix = path.resolve(this.get('prefix'))
  mkdirp(prefix, function (mkdirErr) {
    if (mkdirErr) return cb(mkdirErr)
    fs.stat(prefix, function (statErr, stats) {
      defaultConf.user = stats && stats.uid
      return cb(statErr)
    })
  })
}

160
website/node_modules/npm/lib/dedupe.js generated vendored Normal file
View File

@@ -0,0 +1,160 @@
var util = require('util')
var path = require('path')
var validate = require('aproba')
var without = require('lodash.without')
var asyncMap = require('slide').asyncMap
var chain = require('slide').chain
var npa = require('npm-package-arg')
var log = require('npmlog')
var npm = require('./npm.js')
var Installer = require('./install.js').Installer
var findRequirement = require('./install/deps.js').findRequirement
var earliestInstallable = require('./install/deps.js').earliestInstallable
var checkPermissions = require('./install/check-permissions.js')
var decomposeActions = require('./install/decompose-actions.js')
var loadExtraneous = require('./install/deps.js').loadExtraneous
var computeMetadata = require('./install/deps.js').computeMetadata
var sortActions = require('./install/diff-trees.js').sortActions
var moduleName = require('./utils/module-name.js')
var packageId = require('./utils/package-id.js')
var childPath = require('./utils/child-path.js')
var usage = require('./utils/usage')
var getRequested = require('./install/get-requested.js')
// `npm dedupe` entry point; Deduper is exposed for `npm find-dupes`.
module.exports = dedupe
module.exports.Deduper = Deduper

dedupe.usage = usage(
  'dedupe',
  'npm dedupe'
)
// CLI entry point: hoist duplicated packages in the current project.
function dedupe (args, cb) {
  validate('AF', arguments)

  // operate on the project root, i.e. the /path/to/node_modules/..
  var where = path.resolve(npm.dir, '..')

  // `npm find-dupes` and --dry-run both mean "report only, don't touch disk"
  var dryrun = Boolean(npm.command.match(/^find/)) ||
    Boolean(npm.config.get('dry-run'))

  // dry runs default to parseable output unless JSON was requested
  if (dryrun && !npm.config.get('json')) npm.config.set('parseable', true)

  new Deduper(where, dryrun).run(cb)
}
// A Deduper is an Installer whose "ideal" tree is a clone of the current
// on-disk tree with duplicates hoisted, rather than a fresh resolution.
function Deduper (where, dryrun) {
  validate('SB', arguments)
  Installer.call(this, where, dryrun, [])
  // dedupe may run in directories without a package.json, and must not
  // fire the project's top-level lifecycle scripts
  this.noPackageJsonOk = true
  this.topLevelLifecycles = false
}
util.inherits(Deduper, Installer)
// Override: instead of resolving deps from package.json (as install does),
// start from a clone of the current tree, pull in extraneous packages, and
// recompute metadata on the result.
Deduper.prototype.loadIdealTree = function (cb) {
  validate('F', arguments)
  log.silly('install', 'loadIdealTree')

  var self = this
  chain([
    [this.newTracker(this.progress.loadIdealTree, 'cloneCurrentTree')],
    [this, this.cloneCurrentTreeToIdealTree],
    [this, this.finishTracker, 'cloneCurrentTree'],

    [this.newTracker(this.progress.loadIdealTree, 'loadAllDepsIntoIdealTree', 10)],
    [ function (next) {
      loadExtraneous(self.idealTree, self.progress.loadAllDepsIntoIdealTree, next)
    } ],
    [this, this.finishTracker, 'loadAllDepsIntoIdealTree'],

    [this, andComputeMetadata(this.idealTree)]
  ], cb)
}
// Curried `chain` step: recompute `tree`'s metadata and pass it on.
function andComputeMetadata (tree) {
  return (next) => next(null, computeMetadata(tree))
}
// Override: dedupe's plan is hoist -> sort the diff -> permission-check the
// affected paths -> decompose into primitive install actions.
Deduper.prototype.generateActionsToTake = function (cb) {
  validate('F', arguments)
  log.silly('dedupe', 'generateActionsToTake')
  chain([
    [this.newTracker(log, 'hoist', 1)],
    [hoistChildren, this.idealTree, this.differences],
    [this, this.finishTracker, 'hoist'],
    [this.newTracker(log, 'sort-actions', 1)],
    [this, function (next) {
      this.differences = sortActions(this.differences)
      next()
    }],
    [this, this.finishTracker, 'sort-actions'],
    [checkPermissions, this.differences],
    [decomposeActions, this.differences, this.todo]
  ], cb)
}
// Re-parent `node` under `hoistTo`, remembering where it came from, and
// record the move in `diff` (at most once per node).
function move (node, hoistTo, diff) {
  node.parent.children = without(node.parent.children, node)
  hoistTo.children.push(node)
  node.fromPath = node.path
  node.path = childPath(hoistTo.path, node)
  node.parent = hoistTo
  var alreadyRecorded = diff.some(function (action) {
    return action[0] === 'move' && action[1] === node
  })
  if (!alreadyRecorded) {
    diff.push(['move', node])
  }
}
// After a node is hoisted, recursively pull all of its descendants up to
// sit directly under it.  NOTE: move() reassigns node.children to a new
// array, while forEach keeps iterating the array it was called on --
// preserve this behavior if refactoring.
function moveRemainingChildren (node, diff) {
  node.children.forEach(function (child) {
    move(child, node, diff)
    moveRemainingChildren(child, diff)
  })
}
// Record removal of `child` and its whole subtree in the diff.
function remove (child, diff, done) {
  remove_(child, diff, new Set(), done)
}

// Depth-first removal; `seen` prevents infinite loops on cyclic links.
function remove_ (child, diff, seen, done) {
  if (seen.has(child)) return done()
  seen.add(child)
  diff.push(['remove', child])
  // detach from the parent before descending
  child.parent.children = without(child.parent.children, child)
  asyncMap(child.children, function (child, next) {
    remove_(child, diff, seen, next)
  }, done)
}
// Entry point for the hoist pass; `seen` tracks visited nodes across the walk.
function hoistChildren (tree, diff, next) {
  hoistChildren_(tree, diff, new Set(), next)
}
// Walk the tree, hoisting each dependency as close to the root as its
// version constraints allow.  `seen` guards against revisiting nodes.
function hoistChildren_ (tree, diff, seen, next) {
  validate('OAOF', arguments)
  if (seen.has(tree)) return next()
  seen.add(tree)
  asyncMap(tree.children, function (child, done) {
    // the root's own children and bundled deps stay where they are
    if (!tree.parent || child.fromBundle || child.package._inBundle) return hoistChildren_(child, diff, seen, done)
    // an install further up the tree already satisfies this dep: drop it
    var better = findRequirement(tree.parent, moduleName(child), getRequested(child) || npa(packageId(child)))
    if (better) {
      return chain([
        [remove, child, diff],
        [andComputeMetadata(tree)]
      ], done)
    }
    // otherwise move it to the highest position where it doesn't conflict
    var hoistTo = earliestInstallable(tree, tree.parent, child.package, log)
    if (hoistTo) {
      move(child, hoistTo, diff)
      chain([
        [andComputeMetadata(hoistTo)],
        [hoistChildren_, child, diff, seen],
        [ function (next) {
          moveRemainingChildren(child, diff)
          next()
        } ]
      ], done)
    } else {
      done()
    }
  }, next)
}

55
website/node_modules/npm/lib/deprecate.js generated vendored Normal file
View File

@@ -0,0 +1,55 @@
/* eslint-disable standard/no-callback-literal */
var npm = require('./npm.js')
var mapToRegistry = require('./utils/map-to-registry.js')
var npa = require('npm-package-arg')
// `npm deprecate` entry point.
module.exports = deprecate

deprecate.usage = 'npm deprecate <pkg>[@<version>] <message>'
// Shell completion: offer the packages owned by the logged-in user.
deprecate.completion = function (opts, cb) {
  // first, get a list of remote packages this user owns.
  // once we have a user account, then don't complete anything.
  if (opts.conf.argv.remain.length > 2) return cb()
  // get the list of packages by user
  var path = '/-/by-user/'
  mapToRegistry(path, npm.config, function (er, uri, c) {
    if (er) return cb(er)
    // not logged in: nothing to complete
    if (!(c && c.username)) return cb()

    var params = {
      timeout: 60000,
      auth: c
    }
    npm.registry.get(uri + c.username, params, function (er, list) {
      if (er) return cb()
      // NOTE(review): this dumps the raw package list to stderr on every
      // completion -- looks like leftover debugging; confirm before removing.
      console.error(list)
      return cb(null, list[c.username])
    })
  })
}
// Mark a package (or a version range of it) as deprecated on the registry.
function deprecate (args, cb) {
  var pkg = args[0]
  var msg = args[1]
  // a deprecation message is mandatory
  if (msg === undefined) return cb('Usage: ' + deprecate.usage)

  // fetch the data and make sure it exists.
  var p = npa(pkg)

  // npa makes the default spec "latest", but for deprecation
  // "*" is the appropriate default.
  var spec = p.rawSpec === '' ? '*' : p.fetchSpec

  mapToRegistry(p.name, npm.config, function (er, uri, auth) {
    if (er) return cb(er)
    npm.registry.deprecate(uri, {
      version: spec,
      message: msg,
      auth: auth
    }, cb)
  })
}

159
website/node_modules/npm/lib/dist-tag.js generated vendored Normal file
View File

@@ -0,0 +1,159 @@
/* eslint-disable standard/no-callback-literal */
module.exports = distTag
var log = require('npmlog')
var npa = require('npm-package-arg')
var semver = require('semver')
var npm = require('./npm.js')
var mapToRegistry = require('./utils/map-to-registry.js')
var readLocalPkg = require('./utils/read-local-package.js')
var usage = require('./utils/usage')
var output = require('./utils/output.js')
// usage text for all three subcommands
distTag.usage = usage(
  'dist-tag',
  'npm dist-tag add <pkg>@<version> [<tag>]' +
  '\nnpm dist-tag rm <pkg> <tag>' +
  '\nnpm dist-tag ls [<pkg>]'
)
// Shell completion: offer the subcommand names as the third word; nothing
// deeper is completed.
distTag.completion = function (opts, cb) {
  var argv = opts.conf.argv.remain
  if (argv.length === 2) {
    return cb(null, ['add', 'rm', 'ls'])
  }
  return cb()
}
// Dispatch `npm dist-tag <cmd>` (with several aliases) to its handler.
function distTag (args, cb) {
  var cmd = args.shift()
  if (cmd === 'add' || cmd === 'a' || cmd === 'set' || cmd === 's') {
    return add(args[0], args[1], cb)
  }
  if (cmd === 'rm' || cmd === 'r' || cmd === 'del' || cmd === 'd' || cmd === 'remove') {
    return remove(args[1], args[0], cb)
  }
  if (cmd === 'ls' || cmd === 'l' || cmd === 'sl' || cmd === 'list') {
    return list(args[0], cb)
  }
  return cb('Usage:\n' + distTag.usage)
}
// Point dist-tag `tag` (default: the configured tag) at pkg@version.
function add (spec, tag, cb) {
  var thing = npa(spec || '')
  var pkg = thing.name
  var version = thing.rawSpec
  var t = (tag || npm.config.get('tag')).trim()

  log.verbose('dist-tag add', t, 'to', pkg + '@' + version)

  // package name, version, and tag are all mandatory
  if (!pkg || !version || !t) return cb('Usage:\n' + distTag.usage)

  // tags like "1.x" would shadow semver ranges on install -- reject them
  if (semver.validRange(t)) {
    return cb(new Error('Tag name must not be a valid SemVer range: ' + t))
  }

  fetchTags(pkg, function (err, tags) {
    if (err) return cb(err)

    if (tags[t] === version) {
      log.warn('dist-tag add', t, 'is already set to version', version)
      return cb()
    }
    tags[t] = version

    mapToRegistry(pkg, npm.config, function (err, uri, auth, base) {
      var params = {
        'package': pkg,
        distTag: t,
        version: version,
        auth: auth
      }
      npm.registry.distTags.add(base, params, function (err) {
        if (err) return cb(err)
        output('+' + t + ': ' + pkg + '@' + version)
        cb()
      })
    })
  })
}
// Delete dist-tag `tag` from `pkg` on the registry.
function remove (tag, pkg, cb) {
  log.verbose('dist-tag del', tag, 'from', pkg)
  fetchTags(pkg, function (err, tags) {
    if (err) return cb(err)
    // deleting an unknown tag is an error
    if (!tags[tag]) {
      log.info('dist-tag del', tag, 'is not a dist-tag on', pkg)
      return cb(new Error(tag + ' is not a dist-tag on ' + pkg))
    }
    var version = tags[tag]
    delete tags[tag]
    mapToRegistry(pkg, npm.config, function (err, uri, auth, base) {
      var params = {
        'package': pkg,
        distTag: tag,
        auth: auth
      }
      npm.registry.distTags.rm(base, params, function (err) {
        if (err) return cb(err)
        output('-' + tag + ': ' + pkg + '@' + version)
        cb()
      })
    })
  })
}
// Print the dist-tags of `pkg` (default: the package in the cwd), one
// "tag: version" line each, sorted by tag.
function list (pkg, cb) {
  if (!pkg) {
    return readLocalPkg(function (err, localName) {
      if (err) return cb(err)
      if (!localName) return cb(distTag.usage)
      list(localName, cb)
    })
  }
  fetchTags(pkg, function (err, tags) {
    if (err) {
      log.error('dist-tag ls', "Couldn't get dist-tag data for", pkg)
      return cb(err)
    }
    var lines = Object.keys(tags).map(function (t) {
      return t + ': ' + tags[t]
    })
    output(lines.sort().join('\n'))
    cb(err, tags)
  })
}
// Fetch the dist-tag map for `pkg`; an empty map is reported as an error.
function fetchTags (pkg, cb) {
  mapToRegistry(pkg, npm.config, function (err, uri, auth, base) {
    if (err) return cb(err)
    var params = {
      'package': pkg,
      auth: auth
    }
    npm.registry.distTags.fetch(base, params, function (err, tags) {
      if (err) return cb(err)
      if (!tags || Object.keys(tags).length === 0) {
        return cb(new Error('No dist-tags found for ' + pkg))
      }
      cb(null, tags)
    })
  })
}

41
website/node_modules/npm/lib/docs.js generated vendored Normal file
View File

@@ -0,0 +1,41 @@
module.exports = docs
var openUrl = require('./utils/open-url')
var log = require('npmlog')
var fetchPackageMetadata = require('./fetch-package-metadata.js')
var usage = require('./utils/usage')
// usage text for `npm docs`
docs.usage = usage(
  'docs',
  'npm docs <pkgname>' +
  '\nnpm docs .'
)
// Shell completion: intentionally completes nothing.
docs.completion = function (opts, cb) {
  // FIXME: there used to be registry completion here, but it stopped making
  // sense somewhere around 50,000 packages on the registry
  cb()
}
// Open the documentation page for each named package (default: the package
// in the cwd).  Calls back once, after all pages opened or on first error.
//
// Fix: previously an error from one project returned cb(err) without
// stopping the countdown, so a later success could invoke cb a second
// time.  The `failed` guard ensures cb fires at most once.
function docs (args, cb) {
  if (!args || !args.length) args = ['.']
  var pending = args.length
  var failed = false
  log.silly('docs', args)
  args.forEach(function (proj) {
    getDoc(proj, function (err) {
      if (failed) return
      if (err) {
        failed = true
        return cb(err)
      }
      --pending || cb()
    })
  })
}
// Resolve a project's docs URL from its metadata and open it in a browser.
function getDoc (project, cb) {
  log.silly('getDoc', project)
  fetchPackageMetadata(project, '.', {fullMetadata: true}, function (er, d) {
    if (er) return cb(er)
    // fall back to the package's npmjs.org page when no homepage is set
    var url = d.homepage || 'https://www.npmjs.org/package/' + d.name
    return openUrl(url, 'docs available at the following URL', cb)
  })
}

113
website/node_modules/npm/lib/doctor.js generated vendored Normal file
View File

@@ -0,0 +1,113 @@
'use strict'
const ansiTrim = require('./utils/ansi-trim')
const chain = require('slide').chain
const color = require('ansicolors')
const defaultRegistry = require('./config/defaults').defaults.registry
const log = require('npmlog')
const npm = require('./npm')
const output = require('./utils/output')
const path = require('path')
const semver = require('semver')
const styles = require('ansistyles')
const table = require('text-table')
// steps
const checkFilesPermission = require('./doctor/check-files-permission')
const checkPing = require('./doctor/check-ping')
const getGitPath = require('./doctor/get-git-path')
const getLatestNodejsVersion = require('./doctor/get-latest-nodejs-version')
const getLatestNpmVersion = require('./doctor/get-latest-npm-version')
const verifyCachedFiles = require('./doctor/verify-cached-files')
// directories whose permissions `npm doctor` audits
const globalNodeModules = path.join(npm.config.globalPrefix, 'lib', 'node_modules')
const localNodeModules = path.join(npm.config.localPrefix, 'node_modules')

module.exports = doctor
doctor.usage = 'npm doctor'
// Run all health checks, render the results as a table (unless silent), and
// call back with the raw [check, value, recommendation] rows.
function doctor (args, silent, cb) {
  args = args || {}
  // `silent` is optional
  if (typeof cb !== 'function') {
    cb = silent
    silent = false
  }

  const actionsToRun = [
    [checkPing],
    [getLatestNpmVersion],
    [getLatestNodejsVersion, args['node-url']],
    [getGitPath],
    [checkFilesPermission, npm.cache, 4, 6],
    [checkFilesPermission, globalNodeModules, 4, 4],
    [checkFilesPermission, localNodeModules, 6, 6],
    [verifyCachedFiles, path.join(npm.cache, '_cacache')]
  ]

  log.info('doctor', 'Running checkup')
  chain(actionsToRun, function (stderr, stdout) {
    // a missing git is reported in the table, not as a hard failure
    if (stderr && stderr.message !== 'not found: git') return cb(stderr)

    const list = makePretty(stdout)
    let outHead = ['Check', 'Value', 'Recommendation']
    let outBody = list

    // highlight failing rows when color output is enabled
    if (npm.color) {
      outHead = outHead.map(function (item) {
        return styles.underline(item)
      })
      outBody = outBody.map(function (item) {
        if (item[2]) {
          item[0] = color.red(item[0])
          item[2] = color.magenta(item[2])
        }
        return item
      })
    }

    const outTable = [outHead].concat(outBody)
    const tableOpts = {
      stringLength: function (s) { return ansiTrim(s).length }
    }

    if (!silent) output(table(outTable, tableOpts))

    cb(null, list)
  })
}
// Convert the raw chain results (positional array `p`) into table rows,
// attaching a recommendation to any row whose check failed.
// NOTE(review): the indices assume chain()'s result layout (ping at p[1]
// but its status code tested via p[0]) -- verify against slide's chain docs
// before reordering anything here.
function makePretty (p) {
  const ping = p[1]
  const npmLTS = p[2]
  const nodeLTS = p[3].replace('v', '')
  const whichGit = p[4] || 'not installed'
  const readbleCaches = p[5] ? 'ok' : 'notOk'
  const executableGlobalModules = p[6] ? 'ok' : 'notOk'
  const executableLocalModules = p[7] ? 'ok' : 'notOk'
  const cacheStatus = p[8] ? `verified ${p[8].verifiedContent} tarballs` : 'notOk'
  const npmV = npm.version
  const nodeV = process.version.replace('v', '')
  const registry = npm.config.get('registry')
  const list = [
    ['npm ping', ping],
    ['npm -v', 'v' + npmV],
    ['node -v', 'v' + nodeV],
    ['npm config get registry', registry],
    ['which git', whichGit],
    ['Perms check on cached files', readbleCaches],
    ['Perms check on global node_modules', executableGlobalModules],
    ['Perms check on local node_modules', executableLocalModules],
    ['Verify cache contents', cacheStatus]
  ]

  // the third column (recommendation) is set only when a check failed
  if (p[0] !== 200) list[0][2] = 'Check your internet connection'
  if (!semver.satisfies(npmV, '>=' + npmLTS)) list[1][2] = 'Use npm v' + npmLTS
  if (!semver.satisfies(nodeV, '>=' + nodeLTS)) list[2][2] = 'Use node v' + nodeLTS
  if (registry !== defaultRegistry) list[3][2] = 'Try `npm config set registry ' + defaultRegistry + '`'
  if (whichGit === 'not installed') list[4][2] = 'Install git and ensure it\'s in your PATH.'
  if (readbleCaches !== 'ok') list[5][2] = 'Check the permissions of your files in ' + npm.config.get('cache')
  if (executableGlobalModules !== 'ok') list[6][2] = globalNodeModules + ' must be readable and writable by the current user.'
  if (executableLocalModules !== 'ok') list[7][2] = localNodeModules + ' must be readable and writable by the current user.'

  return list
}

View File

@@ -0,0 +1,57 @@
var fs = require('fs')
var path = require('path')
var getUid = require('uid-number')
var chain = require('slide').chain
var log = require('npmlog')
var npm = require('../npm.js')
var fileCompletion = require('../utils/completion/file-completion.js')
// Verify that every file under `root` grants the npm user the access bits
// in `fmask` (files) / `dmask` (directories).  Calls back with (null, true)
// when everything is accessible and (null, false) otherwise.
function checkFilesPermission (root, fmask, dmask, cb) {
  // Windows has no POSIX permission bits to check
  if (process.platform === 'win32') return cb(null, true)
  getUid(npm.config.get('user'), npm.config.get('group'), function (e, uid, gid) {
    var tracker = log.newItem('checkFilePermissions', 1)
    if (e) {
      tracker.finish()
      tracker.warn('checkFilePermissions', 'Error looking up user and group:', e)
      return cb(e)
    }
    tracker.info('checkFilePermissions', 'Building file list of ' + root)
    fileCompletion(root, '.', Infinity, function (e, files) {
      if (e) {
        tracker.warn('checkFilePermissions', 'Error building file list:', e)
        tracker.finish()
        return cb(e)
      }
      tracker.addWork(files.length)
      tracker.completeWork(1)
      chain(files.map(andCheckFile), function (er) {
        tracker.finish()
        // a permission failure means "not ok", not a hard error
        cb(null, !er)
      })
      function andCheckFile (f) {
        return [checkFile, f]
      }
      function checkFile (f, next) {
        var file = path.join(root, f)
        tracker.silly('checkFilePermissions', f)
        fs.lstat(file, function (e, stat) {
          tracker.completeWork(1)
          if (e) return next(e)
          // skip anything that is neither a plain file nor a directory
          if (!stat.isDirectory() && !stat.isFile()) return next()
          // 6 = fs.constants.R_OK | fs.constants.W_OK
          // constants aren't available on v4
          fs.access(file, stat.isFile() ? fmask : dmask, (err) => {
            if (err) {
              tracker.error('checkFilePermissions', `Missing permissions on ${file}`)
              return next(new Error('Missing permissions for ' + file))
            } else {
              return next()
            }
          })
        })
      }
    })
  })
}
module.exports = checkFilesPermission

12
website/node_modules/npm/lib/doctor/check-ping.js generated vendored Normal file
View File

@@ -0,0 +1,12 @@
var log = require('npmlog')
var ping = require('../ping.js')
// Ping the configured registry and report [statusCode, statusMessage];
// the doctor table flags connectivity problems when entry [0] !== 200.
function checkPing (cb) {
  var tracker = log.newItem('checkPing', 1)
  tracker.info('checkPing', 'Pinging registry')
  ping({}, true, (err, pong, data, res) => {
    // Bug fix: the tracker was never finished, and the error argument was
    // ignored — a failed ping left `res` undefined and `res.statusCode`
    // threw a TypeError instead of reporting the failure.
    tracker.finish()
    if (err || !res) {
      return cb(err || new Error('no response from registry'))
    }
    cb(null, [res.statusCode, res.statusMessage])
  })
}
module.exports = checkPing

13
website/node_modules/npm/lib/doctor/get-git-path.js generated vendored Normal file
View File

@@ -0,0 +1,13 @@
var log = require('npmlog')
var which = require('which')
// Resolve the git executable from the user's PATH; cb(err, absolutePath).
function getGitPath (cb) {
  var tracker = log.newItem('getGitPath', 1)
  tracker.info('getGitPath', 'Finding git in your PATH')
  which('git', function (whichErr, gitPath) {
    tracker.finish()
    cb(whichErr, gitPath)
  })
}
module.exports = getGitPath

View File

@@ -0,0 +1,27 @@
var log = require('npmlog')
var request = require('request')
var semver = require('semver')
// Fetch the Node.js release index and report the newest LTS version string
// ('v0.0.0' if the index lists no LTS releases). `url` defaults to the
// official dist index. cb(err) on network/HTTP/parse failure.
function getLatestNodejsVersion (url, cb) {
  var tracker = log.newItem('getLatestNodejsVersion', 1)
  tracker.info('getLatestNodejsVersion', 'Getting Node.js release information')
  var version = 'v0.0.0'
  url = url || 'https://nodejs.org/dist/index.json'
  request(url, function (e, res, index) {
    tracker.finish()
    if (e) return cb(e)
    if (res.statusCode !== 200) {
      return cb(new Error('Status not 200, ' + res.statusCode))
    }
    try {
      JSON.parse(index).forEach(function (item) {
        if (item.lts && semver.gt(item.version, version)) version = item.version
      })
    } catch (parseErr) {
      // Bug fix: the success callback used to run inside this try block, so
      // an exception thrown by cb itself was caught and cb was invoked a
      // second time with the error. Renamed from `e` to avoid shadowing the
      // request error above.
      return cb(parseErr)
    }
    cb(null, version)
  })
}
module.exports = getLatestNodejsVersion

View File

@@ -0,0 +1,14 @@
var log = require('npmlog')
var fetchPackageMetadata = require('../fetch-package-metadata')
// Look up the version currently published as npm@latest; cb(err, version).
function getLatestNpmVersion (cb) {
  var tracker = log.newItem('getLatestNpmVersion', 1)
  tracker.info('getLatestNpmVersion', 'Getting npm package information')
  fetchPackageMetadata('npm@latest', '.', {}, function (err, metadata) {
    tracker.finish()
    if (err) return cb(err)
    cb(null, metadata.version)
  })
}
module.exports = getLatestNpmVersion

View File

@@ -0,0 +1,19 @@
'use strict'
const cacache = require('cacache')
const log = require('npmlog')
module.exports = verifyCachedFiles
// Run cacache's integrity verification / garbage collection over `cache`
// and call back with the stats object it produced.
function verifyCachedFiles (cache, cb) {
  log.info('verifyCachedFiles', `Verifying cache at ${cache}`)
  cacache.verify(cache).then((stats) => {
    // Bug fix: JSON.stringify's second parameter is the replacer; passing 2
    // there was silently ignored and the stats logged unindented. The intent
    // was a null replacer with 2-space indentation.
    log.info('verifyCachedFiles', `Verification complete. Stats: ${JSON.stringify(stats, null, 2)}`)
    if (stats.reclaimedCount || stats.badContentCount || stats.missingContent) {
      stats.badContentCount && log.warn('verifyCachedFiles', `Corrupted content removed: ${stats.badContentCount}`)
      stats.reclaimedCount && log.warn('verifyCachedFiles', `Content garbage-collected: ${stats.reclaimedCount} (${stats.reclaimedSize} bytes)`)
      stats.missingContent && log.warn('verifyCachedFiles', `Missing content: ${stats.missingContent}`)
      log.warn('verifyCachedFiles', 'Cache issues have been fixed')
    }
    return stats
  }).then((s) => cb(null, s), cb)
}

49
website/node_modules/npm/lib/edit.js generated vendored Normal file
View File

@@ -0,0 +1,49 @@
// npm edit <pkg>
// open the package folder in the $EDITOR
module.exports = edit
edit.usage = 'npm edit <pkg>[/<subpkg>...]'
edit.completion = require('./utils/completion/installed-shallow.js')
var npm = require('./npm.js')
var path = require('path')
var fs = require('graceful-fs')
var editor = require('editor')
var noProgressTillDone = require('./utils/no-progress-while-running').tillDone
// `npm edit <pkg>[/<subpkg>...]`: open an installed package's folder in the
// configured editor, then rebuild it when the editor exits cleanly.
function edit (args, cb) {
  var pkgArg = args[0]
  if (args.length !== 1 || !pkgArg) return cb(edit.usage)

  var editorCmd = npm.config.get('editor')
  if (!editorCmd) {
    return cb(new Error(
      "No editor set. Set the 'editor' config, or $EDITOR environ."
    ))
  }

  // Re-join scoped names (@scope/name) that the split broke apart, then
  // turn the remaining separators into nested node_modules path segments.
  var segments = []
  pkgArg.split('/').forEach(function (segment) {
    var prev = segments[segments.length - 1]
    if (prev && prev[0] === '@' && !prev.includes('/')) {
      segments[segments.length - 1] = prev + '/' + segment
    } else {
      segments.push(segment)
    }
  })
  var subPath = segments
    .join('/node_modules/')
    .replace(/(\/node_modules)+/, '/node_modules')

  var target = path.resolve(npm.dir, subPath)
  fs.lstat(target, function (er) {
    if (er) return cb(er)
    editor(target, { editor: editorCmd }, noProgressTillDone(function (er) {
      if (er) return cb(er)
      npm.commands.rebuild(args, cb)
    }))
  })
}

59
website/node_modules/npm/lib/explore.js generated vendored Normal file
View File

@@ -0,0 +1,59 @@
// npm explore <pkg>[@<version>]
// open a subshell to the package folder.
module.exports = explore
explore.usage = 'npm explore <pkg> [ -- <command>]'
explore.completion = require('./utils/completion/installed-shallow.js')
var npm = require('./npm.js')
var spawn = require('./utils/spawn')
var path = require('path')
var fs = require('graceful-fs')
var isWindowsShell = require('./utils/is-windows-shell.js')
var escapeExecPath = require('./utils/escape-exec-path.js')
var escapeArg = require('./utils/escape-arg.js')
var output = require('./utils/output.js')
// `npm explore <pkg> [-- <command>]`: run an interactive shell (or a single
// command) inside an installed package's directory.
function explore (args, cb) {
  if (args.length < 1 || !args[0]) return cb(explore.usage)
  var p = args.shift()
  var cwd = path.resolve(npm.dir, p)
  var opts = {cwd: cwd, stdio: 'inherit'}

  var shellArgs = []
  // Bug fix: this previously tested `if (args)`, which is always true for an
  // array, so even a bare `npm explore <pkg>` built `['-c', '']` on POSIX
  // shells and ran an empty command instead of an interactive session. Also
  // dropped a dead `shellArgs.unshift('-c')` that was immediately
  // overwritten by the assignment below.
  if (args.length) {
    if (isWindowsShell) {
      var execCmd = escapeExecPath(args.shift())
      var execArgs = [execCmd].concat(args.map(escapeArg))
      opts.windowsVerbatimArguments = true
      shellArgs = ['/d', '/s', '/c'].concat(execArgs)
    } else {
      shellArgs = ['-c', args.map(escapeArg).join(' ').trim()]
    }
  }

  var sh = npm.config.get('shell')
  fs.stat(cwd, function (er, s) {
    if (er || !s.isDirectory()) {
      return cb(new Error(
        "It doesn't look like " + p + ' is installed.'
      ))
    }
    // Print the banner only for interactive sessions.
    if (!shellArgs.length) {
      output(
        '\nExploring ' + cwd + '\n' +
        "Type 'exit' or ^D when finished\n"
      )
    }
    var shell = spawn(sh, shellArgs, opts)
    shell.on('close', function (er) {
      // only fail if non-interactive.
      if (!shellArgs.length) return cb()
      cb(er)
    })
  })
}

118
website/node_modules/npm/lib/fetch-package-metadata.js generated vendored Normal file
View File

@@ -0,0 +1,118 @@
'use strict'
const deprCheck = require('./utils/depr-check')
const path = require('path')
const log = require('npmlog')
const readPackageTree = require('read-package-tree')
const rimraf = require('rimraf')
const validate = require('aproba')
const npa = require('npm-package-arg')
const npm = require('./npm')
const npmlog = require('npmlog')
const limit = require('call-limit')
const tempFilename = require('./utils/temp-filename')
const pacote = require('pacote')
let pacoteOpts
const isWindows = require('./utils/is-windows.js')
// Wrap `done` so that fetch failures are logged (and the progress tracker
// closed, when one was supplied) before the callback fires.
function andLogAndFinish (spec, tracker, done) {
  validate('SOF|SZF|OOF|OZF', [spec, tracker, done])
  return (er, pkg) => {
    if (!er) return done(er, pkg)
    log.silly('fetchPackageMetaData', 'error for ' + String(spec), er.message)
    if (tracker) tracker.finish()
    return done(er, pkg)
  }
}
// In-memory manifest cache shared across fetches, bounded by the byte size
// of cached payloads (via _contentLength) rather than by entry count.
const CACHE = require('lru-cache')({
  max: 300 * 1024 * 1024,
  length: (p) => p._contentLength
})
// Throttle concurrent metadata fetches to npm's configured network limit.
module.exports = limit(fetchPackageMetadata, npm.limit.fetch)
// Fetch the manifest (package.json-level metadata) for `spec`, resolving
// local/relative specs against `where`. Results are memoized in CACHE.
// done(err, manifest); errors from directory specs are translated into
// friendlier ENOLOCAL messages below.
function fetchPackageMetadata (spec, where, opts, done) {
  validate('SSOF|SSFZ|OSOF|OSFZ', [spec, where, opts, done])
  // `opts` is optional: shift args when called as (spec, where, done).
  if (!done) {
    done = opts
    opts = {}
  }
  var tracker = opts.tracker
  const logAndFinish = andLogAndFinish(spec, tracker, done)

  // Accept either a pre-parsed npm-package-arg result or a raw spec string.
  if (typeof spec === 'object') {
    var dep = spec
  } else {
    dep = npa(spec)
  }
  // A drive-letter path (C:\...) cannot be a local directory off-Windows.
  if (!isWindows && dep.type === 'directory' && /^[a-zA-Z]:/.test(dep.fetchSpec)) {
    var err = new Error(`Can't install from windows path on a non-windows system: ${dep.fetchSpec.replace(/[/]/g, '\\')}`)
    err.code = 'EWINDOWSPATH'
    return logAndFinish(err)
  }
  // Loaded lazily to keep npm startup cheap.
  if (!pacoteOpts) {
    pacoteOpts = require('./config/pacote')
  }
  pacote.manifest(dep, pacoteOpts({
    annotate: true,
    fullMetadata: opts.fullMetadata,
    log: tracker || npmlog,
    memoize: CACHE,
    where: where
  })).then(
    (pkg) => logAndFinish(null, deprCheck(pkg)),
    (err) => {
      // Only directory specs get the friendlier translations below.
      if (dep.type !== 'directory') return logAndFinish(err)
      if (err.code === 'ENOTDIR') {
        var enolocal = new Error(`Could not install "${path.relative(process.cwd(), dep.fetchSpec)}" as it is not a directory and is not a file with a name ending in .tgz, .tar.gz or .tar`)
        enolocal.code = 'ENOLOCAL'
        if (err.stack) enolocal.stack = err.stack
        return logAndFinish(enolocal)
      } else if (err.code === 'ENOPACKAGEJSON') {
        var enopackage = new Error(`Could not install from "${path.relative(process.cwd(), dep.fetchSpec)}" as it does not contain a package.json file.`)
        enopackage.code = 'ENOLOCAL'
        if (err.stack) enopackage.stack = err.stack
        return logAndFinish(enopackage)
      } else {
        return logAndFinish(err)
      }
    }
  )
}
module.exports.addBundled = addBundled
// Populate pkg._bundled with the package's bundled dependency tree (null
// when there is none). Directory specs are read in place; everything else
// is extracted to a temp dir, inspected, then cleaned up.
function addBundled (pkg, next) {
  validate('OF', arguments)
  if (!pacoteOpts) {
    pacoteOpts = require('./config/pacote')
  }
  if (pkg._bundled !== undefined) return next(null, pkg)

  // Bug fix: the npa(pkg._from) fallback existed, but the code below kept
  // dereferencing pkg._requested directly, throwing a TypeError whenever
  // _requested was absent. Resolve the fallback first and use it throughout.
  const requested = pkg._requested || npa(pkg._from)
  if (!pkg.bundleDependencies && requested.type !== 'directory') return next(null, pkg)
  if (requested.type === 'directory') {
    pkg._bundled = null
    return readPackageTree(requested.fetchSpec, function (er, tree) {
      if (tree) pkg._bundled = tree.children
      return next(null, pkg)
    })
  }
  pkg._bundled = null
  const target = tempFilename('unpack')
  const opts = pacoteOpts({integrity: pkg._integrity})
  pacote.extract(pkg._resolved || pkg._requested || npa.resolve(pkg.name, pkg.version), target, opts).then(() => {
    log.silly('addBundled', 'read tarball')
    readPackageTree(target, (err, tree) => {
      if (err) { return next(err) }
      log.silly('cleanup', 'remove extracted module')
      rimraf(target, function () {
        if (tree) {
          pkg._bundled = tree.children
        }
        next(null, pkg)
      })
    })
  }, next)
}

37
website/node_modules/npm/lib/fetch-package-metadata.md generated vendored Normal file
View File

@@ -0,0 +1,37 @@
fetch-package-metadata
----------------------
var fetchPackageMetadata = require("npm/lib/fetch-package-metadata")
fetchPackageMetadata(spec, contextdir, callback)
This will get package metadata (and if possible, ONLY package metadata) for
a specifier as passed to `npm install` et al, eg `npm@next` or `npm@^2.0.3`
## fetchPackageMetadata(*spec*, *contextdir*, *tracker*, *callback*)
* *spec* **string** | **object** -- The package specifier, can be anything npm can
understand (see [realize-package-specifier]), or it can be the result from
realize-package-specifier or npm-package-arg (for non-local deps).
* *contextdir* **string** -- The directory from which relative paths to
local packages should be resolved.
* *tracker* **object** -- **(optional)** An are-we-there-yet tracker group as
provided by `npm.log.newGroup()`.
* *callback* **function (er, package)** -- Called when the package information
has been loaded. `package` is the object form of the `package.json`
matching the requested spec. In the case of named packages, it comes from
the registry and thus may not exactly match what's found in the associated
tarball.
[realize-package-specifier]: (https://github.com/npm/realize-package-specifier)
In the case of tarballs and git repos, it will use the cache to download
them in order to get the package metadata. For named packages, only the
metadata is downloaded (eg https://registry.npmjs.org/package). For local
directories, the package.json is read directly. For local tarballs, the
tarball is streamed in memory and just the package.json is extracted from
it. (Due to the nature of tars, having the package.json early in the file
will result in it being loaded faster, as the extractor short-circuits the
uncompress/untar streams as best as it can.)

12
website/node_modules/npm/lib/get.js generated vendored Normal file
View File

@@ -0,0 +1,12 @@
module.exports = get
get.usage = 'npm get <key> <value> (See `npm config`)'
var npm = require('./npm.js')
get.completion = npm.commands.config.completion
// `npm get` is sugar for `npm config get`.
function get (args, cb) {
  var configArgs = ['get'].concat(args)
  npm.commands.config(configArgs, cb)
}

212
website/node_modules/npm/lib/help-search.js generated vendored Normal file
View File

@@ -0,0 +1,212 @@
module.exports = helpSearch
var fs = require('graceful-fs')
var path = require('path')
var asyncMap = require('slide').asyncMap
var npm = require('./npm.js')
var glob = require('glob')
var color = require('ansicolors')
var output = require('./utils/output.js')
helpSearch.usage = 'npm help-search <text>'
// Full-text search over npm's bundled doc pages for the given terms, then
// print ranked matches. `silent` is optional.
function helpSearch (args, silent, cb) {
  if (typeof cb !== 'function') {
    cb = silent
    silent = false
  }
  if (!args.length) return cb(helpSearch.usage)

  var docPath = path.resolve(__dirname, '..', 'doc')
  return glob(docPath + '/*/*.md', onFiles)

  function onFiles (er, files) {
    if (er) return cb(er)
    readFiles(files, onContents)
  }
  function onContents (er, data) {
    if (er) return cb(er)
    searchFiles(args, data, onResults)
  }
  function onResults (er, results) {
    if (er) return cb(er)
    formatResults(args, results, cb)
  }
}
// Read every file into a { path: contents } map; the first read error wins.
function readFiles (files, cb) {
  var contents = {}
  asyncMap(files, function (file, next) {
    fs.readFile(file, 'utf8', function (er, data) {
      contents[file] = data
      return next(er)
    })
  }, function (er) {
    return cb(er, contents)
  })
}
// Score each doc file against the search terms. `files` maps path ->
// contents. Produces records { file, cmd, lines, found, hits, totalHits }
// sorted by relevance; with exactly one match it jumps straight into
// `npm help` for that page instead. cb(null, results), or cb() on no match.
function searchFiles (args, files, cb) {
  var results = []
  Object.keys(files).forEach(function (file) {
    var data = files[file]
    // skip if no matches at all
    var match
    for (var a = 0, l = args.length; a < l && !match; a++) {
      match = data.toLowerCase().indexOf(args[a].toLowerCase()) !== -1
    }
    if (!match) return
    var lines = data.split(/\n+/)
    // if a line has a search term, then skip it and the next line.
    // if the next line has a search term, then skip all 3
    // otherwise, set the line to null. then remove the nulls.
    l = lines.length
    for (var i = 0; i < l; i++) {
      var line = lines[i]
      var nextLine = lines[i + 1]
      var ll
      match = false
      if (nextLine) {
        for (a = 0, ll = args.length; a < ll && !match; a++) {
          match = nextLine.toLowerCase()
            .indexOf(args[a].toLowerCase()) !== -1
        }
        if (match) {
          // skip over the next line, and the line after it.
          i += 2
          continue
        }
      }
      match = false
      for (a = 0, ll = args.length; a < ll && !match; a++) {
        match = line.toLowerCase().indexOf(args[a].toLowerCase()) !== -1
      }
      if (match) {
        // skip over the next line
        i++
        continue
      }
      lines[i] = null
    }
    // now squish any string of nulls into a single null
    lines = lines.reduce(function (l, r) {
      if (!(r === null && l[l.length - 1] === null)) l.push(r)
      return l
    }, [])
    // drop null separators hanging off either end
    if (lines[lines.length - 1] === null) lines.pop()
    if (lines[0] === null) lines.shift()
    // now see how many args were found at all.
    var found = {}
    var totalHits = 0
    lines.forEach(function (line) {
      args.forEach(function (arg) {
        // occurrence count of `arg` within this surviving line
        var hit = (line || '').toLowerCase()
          .split(arg.toLowerCase()).length - 1
        if (hit > 0) {
          found[arg] = (found[arg] || 0) + hit
          totalHits += hit
        }
      })
    })
    // pages under doc/api are shown via `npm apihelp`, the rest via `npm help`
    var cmd = 'npm help '
    if (path.basename(path.dirname(file)) === 'api') {
      cmd = 'npm apihelp '
    }
    cmd += path.basename(file, '.md').replace(/^npm-/, '')
    results.push({
      file: file,
      cmd: cmd,
      lines: lines,
      found: Object.keys(found),
      hits: found,
      totalHits: totalHits
    })
  })
  // if only one result, then just show that help section.
  if (results.length === 1) {
    return npm.commands.help([results[0].file.replace(/\.md$/, '')], cb)
  }
  if (results.length === 0) {
    output('No results for ' + args.map(JSON.stringify).join(' '))
    return cb()
  }
  // sort results by number of results found, then by number of hits
  // then by number of matching lines
  results = results.sort(function (a, b) {
    return a.found.length > b.found.length ? -1
      : a.found.length < b.found.length ? 1
        : a.totalHits > b.totalHits ? -1
          : a.totalHits < b.totalHits ? 1
            : a.lines.length > b.lines.length ? -1
              : a.lines.length < b.lines.length ? 1
                : 0
  })
  cb(null, results)
}
// Render ranked search results as aligned "command   term:count" rows; with
// --long, also print up to four matched lines per page with the search
// terms highlighted.
// NOTE(review): the inner `for (var out = line, ...)` below deliberately
// shadows the outer `out` accumulator, and for multiple search terms only
// the last term's `newOut` pass survives the loop — preserved as-is here.
function formatResults (args, results, cb) {
  if (!results) return cb(null)
  // clamp layout to the terminal width, defaulting to 80 columns
  var cols = Math.min(process.stdout.columns || Infinity, 80) + 1
  var out = results.map(function (res) {
    var out = res.cmd
    var r = Object.keys(res.hits)
      .map(function (k) {
        return k + ':' + res.hits[k]
      }).sort(function (a, b) {
        return a > b ? 1 : -1
      }).join(' ')
    // pad with spaces so the hit summary is right-aligned
    out += ((new Array(Math.max(1, cols - out.length - r.length)))
      .join(' ')) + r
    if (!npm.config.get('long')) return out
    out = '\n\n' + out + '\n' +
      (new Array(cols)).join('—') + '\n' +
      res.lines.map(function (line, i) {
        // show at most the first four surviving context lines
        if (line === null || i > 3) return ''
        for (var out = line, a = 0, l = args.length; a < l; a++) {
          var finder = out.toLowerCase().split(args[a].toLowerCase())
          var newOut = ''
          var p = 0
          // re-assemble the line, wrapping each term occurrence in color
          finder.forEach(function (f) {
            newOut += out.substr(p, f.length)
            var hilit = out.substr(p + f.length, args[a].length)
            if (npm.color) hilit = color.bgBlack(color.red(hilit))
            newOut += hilit
            p += f.length + args[a].length
          })
        }
        return newOut
      }).join('\n').trim()
    return out
  }).join('\n')
  if (results.length && !npm.config.get('long')) {
    out = 'Top hits for ' + (args.map(JSON.stringify).join(' ')) + '\n' +
      (new Array(cols)).join('—') + '\n' +
      out + '\n' +
      (new Array(cols)).join('—') + '\n' +
      '(run with -l or --long to see more context)'
  }
  output(out.trim())
  cb(null, results)
}

247
website/node_modules/npm/lib/help.js generated vendored Normal file
View File

@@ -0,0 +1,247 @@
module.exports = help

// Tab completion: offer man-page topic names for `npm help <TAB>`, but only
// for the first argument.
help.completion = function (opts, cb) {
  if (opts.conf.argv.remain.length > 2) return cb(null, [])
  getSections(cb)
}
var path = require('path')
var spawn = require('./utils/spawn')
var npm = require('./npm.js')
var log = require('npmlog')
var openUrl = require('./utils/open-url')
var glob = require('glob')
var didYouMean = require('./utils/did-you-mean')
var cmdList = require('./config/cmd-list').cmdList
var shorthands = require('./config/cmd-list').shorthands
var commands = cmdList.concat(Object.keys(shorthands))
var output = require('./utils/output.js')
// `npm help <topic>`: resolve a topic to a man page (or a usage string) and
// display it. Handles explicit section numbers (`npm help 5 folders`),
// `npm <cmd> -h`, multi-word topic searches, and legacy topic aliases.
function help (args, cb) {
  var argv = npm.config.get('argv').cooked
  var argnum = 0
  // `npm help <num> <topic>` — an explicit man section was requested
  if (args.length === 2 && ~~args[0]) {
    argnum = ~~args.shift()
  }
  // npm help foo bar baz: search topics
  if (args.length > 1 && args[0]) {
    return npm.commands['help-search'](args, argnum, cb)
  }
  var section = npm.deref(args[0]) || args[0]
  // npm help <noargs>: show basic usage
  if (!section) {
    var valid = argv[0] === 'help' ? 0 : 1
    return npmUsage(valid, cb)
  }
  // npm <command> -h: show command usage
  if (npm.config.get('usage') &&
    npm.commands[section] &&
    npm.commands[section].usage) {
    npm.config.set('loglevel', 'silent')
    log.level = 'silent'
    output(npm.commands[section].usage)
    return cb()
  }
  // npm apihelp <section>: Prefer section 3 over section 1
  var apihelp = argv.length && argv[0].indexOf('api') !== -1
  var pref = apihelp ? [3, 1, 5, 7] : [1, 3, 5, 7]
  if (argnum) {
    pref = [ argnum ].concat(pref.filter(function (n) {
      return n !== argnum
    }))
  }
  // npm help <section>: Try to find the path
  var manroot = path.resolve(__dirname, '..', 'man')
  // legacy
  if (section === 'global') section = 'folders'
  else if (section === 'json') section = 'package.json'
  // find either /section.n or /npm-section.n
  // The glob is used in the glob. The regexp is used much
  // further down. Globs and regexps are different
  var compextglob = '.+(gz|bz2|lzma|[FYzZ]|xz)'
  var compextre = '\\.(gz|bz2|lzma|[FYzZ]|xz)$'
  var f = '+(npm-' + section + '|' + section + ').[0-9]?(' + compextglob + ')'
  return glob(manroot + '/*/' + f, function (er, mans) {
    if (er) return cb(er)
    // no man page matched: fall back to full-text help search
    if (!mans.length) return npm.commands['help-search'](args, cb)
    // strip any compression extension before ranking candidates
    mans = mans.map(function (man) {
      var ext = path.extname(man)
      if (man.match(new RegExp(compextre))) man = path.basename(man, ext)
      return man
    })
    viewMan(pickMan(mans, pref), cb)
  })
}
// Choose the best man page out of `mans` according to the section
// preference order `pref_` (earlier sections win); ties within a section
// are broken by reverse lexicographic path order. Sorts `mans` in place.
function pickMan (mans, pref_) {
  var sectionRe = /([0-9]+)$/
  var rank = {}
  pref_.forEach(function (sect, i) {
    rank[sect] = i
  })
  mans.sort(function (a, b) {
    var sa = a.match(sectionRe)[1]
    var sb = b.match(sectionRe)[1]
    if (sa === sb) return a > b ? -1 : 1
    return rank[sa] < rank[sb] ? -1 : 1
  })
  return mans[0]
}
// Display a resolved man page path using the configured viewer: 'woman'
// (emacsclient), 'browser' (pre-rendered HTML via openUrl), or man(1).
function viewMan (man, cb) {
  var nre = /([0-9]+)$/
  var num = man.match(nre)[1]
  var section = path.basename(man, '.' + num)
  // at this point, we know that the specified man page exists
  // spawn the viewer with MANPATH pointing at npm's bundled man tree
  var manpath = path.join(__dirname, '..', 'man')
  var env = {}
  Object.keys(process.env).forEach(function (i) {
    env[i] = process.env[i]
  })
  env.MANPATH = manpath
  var viewer = npm.config.get('viewer')
  var conf
  switch (viewer) {
    case 'woman':
      var a = ['-e', '(woman-find-file \'' + man + '\')']
      conf = { env: env, stdio: 'inherit' }
      var woman = spawn('emacsclient', a, conf)
      woman.on('close', cb)
      break
    case 'browser':
      openUrl(htmlMan(man), 'help available at the following URL', cb)
      break
    default:
      conf = { env: env, stdio: 'inherit' }
      var manProcess = spawn('man', [num, section], conf)
      manProcess.on('close', cb)
      break
  }
}
// Map a man page path to its pre-rendered HTML file under html/doc/, keyed
// by man section (1 → cli, 3 → api, 5 → files, 7 → misc). Throws on any
// other section number.
function htmlMan (man) {
  var sectDirs = { 1: 'cli', 3: 'api', 5: 'files', 7: 'misc' }
  var sect = +man.match(/([0-9]+)$/)[1]
  var dir = sectDirs[sect]
  if (!dir) throw new Error('invalid man section: ' + sect)
  var f = path.basename(man).replace(/([0-9]+)$/, 'html')
  return path.resolve(__dirname, '..', 'html', 'doc', dir, f)
}
// Print npm's top-level usage screen (short command grid, or per-command
// usage with --long) plus a did-you-mean hint, then cb(valid) where valid
// doubles as the exit status.
function npmUsage (valid, cb) {
  npm.config.set('loglevel', 'silent')
  log.level = 'silent'
  var commandList = npm.config.get('long')
    ? usages()
    : ' ' + wrap(commands)
  var lines = [
    '\nUsage: npm <command>',
    '',
    'where <command> is one of:',
    commandList,
    '',
    'npm <command> -h quick help on <command>',
    'npm -l display full usage info',
    'npm help <term> search for help on <term>',
    'npm help npm involved overview',
    '',
    'Specify configs in the ini-formatted file:',
    ' ' + npm.config.get('userconfig'),
    'or on the command line via: npm <command> --key value',
    'Config info can be viewed via: npm help config',
    '',
    'npm@' + npm.version + ' ' + path.dirname(__dirname)
  ]
  output(lines.join('\n'))
  if (npm.argv.length > 1) {
    output(didYouMean(npm.argv[1], commands))
  }
  cb(valid)
}
// Build the --long command list: one "name   usage" entry per canonical
// command (aliases excluded), with usage continuation lines indented to
// line up past the longest command name.
function usages () {
  var maxLen = 0
  var pairs = []
  Object.keys(npm.commands).forEach(function (c) {
    if (c !== npm.deref(c)) return
    pairs.push([c, npm.commands[c].usage || ''])
    maxLen = Math.max(maxLen, c.length)
  })
  return pairs.map(function (pair) {
    var name = pair[0]
    var usage = pair[1]
    var pad = new Array(maxLen - name.length + 2).join(' ')
    var indented = usage.split('\n').join('\n' + (new Array(maxLen + 6).join(' ')))
    return '\n ' + name + pad + indented
  }).join('\n')
}
// Lay the command names out as comma-separated rows sized to the terminal
// (clamped between 24 and 60 columns; 60 when width is unknown). Sorts the
// input array in place, like the original.
function wrap (arr) {
  var width = process.stdout.columns
  width = width ? Math.min(60, Math.max(width - 16, 24)) : 60
  var rows = ['']
  var row = 0
  arr.sort(function (a, b) { return a < b ? -1 : 1 })
    .forEach(function (c) {
      if (rows[row].length + c.length + 2 < width) {
        rows[row] += ', ' + c
      } else {
        // close the current row and start the next one with this command
        rows[row++] += ','
        rows[row] = c
      }
    })
  // every row starts with ', '; substr(2) trims it off the first row
  return rows.join('\n ').substr(2)
}
// List available man-page topics (deduped across sections, `npm-` prefix
// stripped), always including 'help' itself.
function getSections (cb) {
  var g = path.resolve(__dirname, '../man/man[0-9]/*.[0-9]')
  glob(g, function (er, files) {
    if (er) return cb(er)
    var sections = { help: true }
    files.forEach(function (file) {
      var topic = path.basename(file).replace(/\.[0-9]+$/, '').replace(/^npm-/, '')
      sections[topic] = true
    })
    cb(null, Object.keys(sections))
  })
}

135
website/node_modules/npm/lib/hook.js generated vendored Normal file
View File

@@ -0,0 +1,135 @@
'use strict'
const BB = require('bluebird')
const crypto = require('crypto')
const hookApi = require('libnpmhook')
const log = require('npmlog')
const npm = require('./npm.js')
const output = require('./utils/output.js')
const pudding = require('figgy-pudding')
const relativeDate = require('tiny-relative-date')
const Table = require('cli-table3')
const usage = require('./utils/usage.js')
const validate = require('aproba')
// Usage summary for `npm hook` and bad invocations.
hook.usage = usage([
  'npm hook add <pkg> <url> <secret> [--type=<type>]',
  'npm hook ls [pkg]',
  'npm hook rm <id>',
  'npm hook update <id> <url> <secret>'
])

// No completion values yet — always returns an empty list.
hook.completion = (opts, cb) => {
  validate('OF', [opts, cb])
  return cb(null, []) // fill in this array with completion values
}

// Random id tying this process's registry requests together in server logs.
const npmSession = crypto.randomBytes(8).toString('hex')
const hookConfig = pudding()
// Build the figgy-pudding config handed to libnpmhook: request metadata
// layered over the live npm config.
function config () {
  return hookConfig({
    refer: npm.refer,
    projectScope: npm.projectScope,
    log,
    npmSession
  }, npm.config)
}

// Command entry point: run the dispatcher as a promise, adapt to callback.
module.exports = (args, cb) => BB.try(() => hook(args)).nodeify(cb)
// Dispatch `npm hook <subcommand>`; each handler returns a promise which
// the module entry point adapts to the command callback.
function hook (args) {
  switch (args[0]) {
    case 'add':
      return add(args[1], args[2], args[3])
    case 'ls':
      return ls(args[1])
    case 'rm':
      return rm(args[1])
    case 'update':
    case 'up':
      return update(args[1], args[2], args[3])
    default:
      // Previously an unknown subcommand fell through, resolving the promise
      // with undefined and reporting silent success. Surface a usage error.
      throw new Error('Unknown hook command: ' + args[0] + '\n\n' + hook.usage)
  }
}
// Register a new hook and print a one-line confirmation (or raw JSON when
// --json is set).
function add (pkg, uri, secret) {
  return hookApi.add(pkg, uri, secret, config())
    .then((hook) => {
      if (npm.config.get('json')) {
        output(JSON.stringify(hook, null, 2))
      } else {
        const arrow = npm.config.get('unicode') ? ' ➜ ' : ' -> '
        output(`+ ${hookName(hook)} ${arrow} ${hook.endpoint}`)
      }
    })
}
// List configured hooks, optionally filtered by package/scope. Renders raw
// JSON with --json, otherwise a table in which each hook occupies two rows:
// the id cell spans both (rowSpan: 2) and the second row carries the
// last-delivery status — the push order below is what keeps them paired.
function ls (pkg) {
  return hookApi.ls(pkg, config())
    .then((hooks) => {
      if (npm.config.get('json')) {
        output(JSON.stringify(hooks, null, 2))
      } else if (!hooks.length) {
        output("You don't have any hooks configured yet.")
      } else {
        if (hooks.length === 1) {
          output('You have one hook configured.')
        } else {
          output(`You have ${hooks.length} hooks configured.`)
        }
        const table = new Table({head: ['id', 'target', 'endpoint']})
        hooks.forEach((hook) => {
          // row 1: id (spanning both rows), target name, endpoint URL
          table.push([
            {rowSpan: 2, content: hook.id},
            hookName(hook),
            hook.endpoint
          ])
          // row 2: delivery info under the target/endpoint columns
          if (hook.last_delivery) {
            table.push([
              {
                colSpan: 1,
                content: `triggered ${relativeDate(hook.last_delivery)}`
              },
              hook.response_code
            ])
          } else {
            table.push([{colSpan: 2, content: 'never triggered'}])
          }
        })
        output(table.toString())
      }
    })
}
// Delete a hook by id and print a one-line confirmation (or raw JSON when
// --json is set).
function rm (id) {
  return hookApi.rm(id, config())
    .then((hook) => {
      if (npm.config.get('json')) {
        output(JSON.stringify(hook, null, 2))
      } else {
        const mark = npm.config.get('unicode') ? ' ✘ ' : ' X '
        output(`- ${hookName(hook)} ${mark} ${hook.endpoint}`)
      }
    })
}
// Update an existing hook's url/secret and print a one-line confirmation
// (or raw JSON when --json is set).
function update (id, uri, secret) {
  return hookApi.update(id, uri, secret, config())
    .then((hook) => {
      if (npm.config.get('json')) {
        output(JSON.stringify(hook, null, 2))
      } else {
        const arrow = npm.config.get('unicode') ? ' ➜ ' : ' -> '
        output(`+ ${hookName(hook)} ${arrow} ${hook.endpoint}`)
      }
    })
}
function hookName (hook) {
let target = hook.name
if (hook.type === 'scope') { target = '@' + target }
if (hook.type === 'owner') { target = '~' + target }
return target
}

85
website/node_modules/npm/lib/init.js generated vendored Normal file
View File

@@ -0,0 +1,85 @@
// initialize a package.json file
module.exports = init
var path = require('path')
var log = require('npmlog')
var npa = require('npm-package-arg')
var npm = require('./npm.js')
var npx = require('libnpx')
var initJson = require('init-package-json')
var isRegistry = require('./utils/is-registry.js')
var output = require('./utils/output.js')
var noProgressTillDone = require('./utils/no-progress-while-running').tillDone
var usage = require('./utils/usage')
init.usage = usage(
'init',
'\nnpm init [--force|-f|--yes|-y|--scope]' +
'\nnpm init <@scope> (same as `npx <@scope>/create`)' +
'\nnpm init [<@scope>/]<name> (same as `npx [<@scope>/]create-<name>`)'
)
// `npm init`: with arguments, delegate to npx to run a create-* initializer
// (e.g. `npm init foo` → `npx create-foo`, `npm init @scope` →
// `npx @scope/create`); with no arguments, run the interactive
// init-package-json questionnaire in the current directory.
function init (args, cb) {
  if (args.length) {
    var NPM_PATH = path.resolve(__dirname, '../bin/npm-cli.js')
    var initerName = args[0]
    var packageName = initerName
    // bare scope: `@scope` → `@scope/create`
    if (/^@[^/]+$/.test(initerName)) {
      packageName = initerName + '/create'
    } else {
      var req = npa(initerName)
      if (req.type === 'git' && req.hosted) {
        // hosted git spec: user/proj → user/create-proj
        var { user, project } = req.hosted
        packageName = initerName
          .replace(user + '/' + project, user + '/create-' + project)
      } else if (isRegistry(req)) {
        // registry spec: prefix the name part with create-, keep any
        // version/range the user supplied
        packageName = req.name.replace(/^(@[^/]+\/)?/, '$1create-')
        if (req.rawSpec) {
          packageName += '@' + req.rawSpec
        }
      } else {
        var err = new Error(
          'Unrecognized initializer: ' + initerName +
          '\nFor more package binary executing power check out `npx`:' +
          '\nhttps://www.npmjs.com/package/npx'
        )
        err.code = 'EUNSUPPORTED'
        throw err
      }
    }
    // forward the remaining CLI args to the initializer through npx
    var npxArgs = [process.argv0, '[fake arg]', '--always-spawn', packageName, ...process.argv.slice(4)]
    var parsed = npx.parseArgs(npxArgs, NPM_PATH)
    return npx(parsed)
      .then(() => cb())
      .catch(cb)
  }
  var dir = process.cwd()
  log.pause()
  var initFile = npm.config.get('init-module')
  // intro banner is skipped when --yes/--force suppresses prompting
  if (!initJson.yes(npm.config)) {
    output([
      'This utility will walk you through creating a package.json file.',
      'It only covers the most common items, and tries to guess sensible defaults.',
      '',
      'See `npm help json` for definitive documentation on these fields',
      'and exactly what they do.',
      '',
      'Use `npm install <pkg>` afterwards to install a package and',
      'save it as a dependency in the package.json file.',
      '',
      'Press ^C at any time to quit.'
    ].join('\n'))
  }
  initJson(dir, initFile, npm.config, noProgressTillDone(function (er, data) {
    log.resume()
    log.silly('package data', data)
    // user hit ^C during the questionnaire: treat as no-op success
    if (er && er.message === 'canceled') {
      log.warn('init', 'canceled')
      return cb(null, data)
    }
    log.info('init', 'written successfully')
    cb(er, data)
  }))
}

26
website/node_modules/npm/lib/install-ci-test.js generated vendored Normal file
View File

@@ -0,0 +1,26 @@
'use strict'
// npm install-ci-test
// Runs `npm ci` and then runs `npm test`
module.exports = installTest
var ci = require('./ci.js')
var test = require('./test.js')
var usage = require('./utils/usage')
installTest.usage = usage(
'install-ci-test',
'\nnpm install-ci-test [args]' +
'\nSame args as `npm ci`'
)
installTest.completion = ci.completion
// `npm install-ci-test`: run a clean `npm ci`, then the project's tests.
function installTest (args, cb) {
  ci(args, function (ciErr) {
    if (ciErr) return cb(ciErr)
    test([], cb)
  })
}

26
website/node_modules/npm/lib/install-test.js generated vendored Normal file
View File

@@ -0,0 +1,26 @@
'use strict'
// npm install-test
// Runs `npm install` and then runs `npm test`
module.exports = installTest
var install = require('./install.js')
var test = require('./test.js')
var usage = require('./utils/usage')
installTest.usage = usage(
'install-test',
'\nnpm install-test [args]' +
'\nSame args as `npm install`'
)
installTest.completion = install.completion
// `npm install-test`: run `npm install`, then the project's tests.
function installTest (args, cb) {
  install(args, function (installErr) {
    if (installErr) return cb(installErr)
    test([], cb)
  })
}
}

1016
website/node_modules/npm/lib/install.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

8
website/node_modules/npm/lib/install/access-error.js generated vendored Normal file
View File

@@ -0,0 +1,8 @@
'use strict'
module.exports = function (dir, er) {
if (!er) return
var accessEr = new Error("EACCES, access '" + dir + "'", -13)
accessEr.code = 'EACCES'
accessEr.path = dir
return accessEr
}

13
website/node_modules/npm/lib/install/action/build.js generated vendored Normal file
View File

@@ -0,0 +1,13 @@
'use strict'
var chain = require('slide').chain
var build = require('../../build.js')
var npm = require('../../npm.js')
var packageId = require('../../utils/package-id.js')
// Install-action "build": link the staged package's bins/man pages and write
// its builtin npmrc, then call next(err). `staging` is unused here but is
// part of the shared (staging, pkg, log, next) action signature.
module.exports = function (staging, pkg, log, next) {
  log.silly('build', packageId(pkg))
  chain([
    [build.linkStuff, pkg.package, pkg.path, npm.config.get('global')],
    [build.writeBuiltinConf, pkg.package, pkg.path]
  ], next)
}

View File

@@ -0,0 +1,18 @@
'use strict'
const BB = require('bluebird')
const extract = require('pacote/extract')
const npmlog = require('npmlog')
module.exports = (args, cb) => {
const parsed = typeof args === 'string' ? JSON.parse(args) : args
const spec = parsed[0]
const extractTo = parsed[1]
const opts = parsed[2]
if (!opts.log) {
opts.log = npmlog
}
opts.log.level = opts.loglevel || opts.log.level
BB.resolve(extract(spec, extractTo, opts)).nodeify(cb)
}

136
website/node_modules/npm/lib/install/action/extract.js generated vendored Normal file
View File

@@ -0,0 +1,136 @@
'use strict'
const BB = require('bluebird')
const stat = BB.promisify(require('graceful-fs').stat)
const gentlyRm = BB.promisify(require('../../utils/gently-rm.js'))
const mkdirp = BB.promisify(require('mkdirp'))
const moduleStagingPath = require('../module-staging-path.js')
const move = require('../../utils/move.js')
const npa = require('npm-package-arg')
const npm = require('../../npm.js')
const packageId = require('../../utils/package-id.js')
let pacoteOpts
const path = require('path')
const localWorker = require('./extract-worker.js')
const workerFarm = require('worker-farm')
const isRegistry = require('../../utils/is-registry.js')
const WORKER_PATH = require.resolve('./extract-worker.js')
let workers
// NOTE: temporarily disabled on non-OSX due to ongoing issues:
//
// * Seems to make Windows antivirus issues much more common
// * Messes with Docker (I think)
//
// There are other issues that should be fixed that affect OSX too:
//
// * Logging is messed up right now because pacote does its own thing
// * Global deduplication in pacote breaks due to multiple procs
//
// As these get fixed, we can start experimenting with re-enabling it
// at least on some platforms.
const ENABLE_WORKERS = process.platform === 'darwin'
// Spin up the worker-farm pool before any extract actions run (only when
// workers are enabled — currently darwin only).  Pool creation is
// synchronous, so this resolves immediately.
extract.init = () => {
  if (ENABLE_WORKERS) {
    const farmOpts = {
      maxConcurrentCallsPerWorker: npm.limit.fetch,
      maxRetries: 1
    }
    workers = workerFarm(farmOpts, WORKER_PATH)
  }
  return BB.resolve()
}
// Shut the worker pool down after all extract actions have completed.
extract.teardown = () => {
  if (!ENABLE_WORKERS) return BB.resolve()
  workerFarm.end(workers)
  workers = null
  return BB.resolve()
}
module.exports = extract

// Extract `pkg` into its staging directory.  Registry and remote specs
// are (when workers are enabled) handed off to the worker-farm pool;
// everything else is extracted in-process.  After extraction, bundled
// dependencies are staged out individually and the leftover node_modules
// directory is removed.
function extract (staging, pkg, log) {
  log.silly('extract', packageId(pkg))
  const extractTo = moduleStagingPath(staging, pkg)
  // Lazy-load the pacote config (keeps npm startup cheap).
  if (!pacoteOpts) {
    pacoteOpts = require('../../config/pacote')
  }
  const opts = pacoteOpts({
    integrity: pkg.package._integrity,
    resolved: pkg.package._resolved
  })
  const args = [
    pkg.package._requested,
    extractTo,
    opts
  ]
  return BB.fromNode((cb) => {
    let launcher = localWorker
    let msg = args
    const spec = typeof args[0] === 'string' ? npa(args[0]) : args[0]
    args[0] = spec.raw
    if (ENABLE_WORKERS && (isRegistry(spec) || spec.type === 'remote')) {
      // We can't serialize these options
      opts.loglevel = opts.log.level
      opts.log = null
      opts.dirPacker = null
      // workers will run things in parallel!
      launcher = workers
      try {
        // Worker messages must be JSON; bail out to the caller if the
        // options graph turns out not to be serializable.
        msg = JSON.stringify(msg)
      } catch (e) {
        return cb(e)
      }
    }
    launcher(msg, cb)
  }).then(() => {
    // Bundled deps are staged individually so the rest of the installer
    // can treat them like any other module.
    if (pkg.package.bundleDependencies || anyBundled(pkg)) {
      return readBundled(pkg, staging, extractTo)
    }
  }).then(() => {
    // Bundled copies (if any) were moved out above; drop the leftover
    // node_modules that came out of the tarball.
    return gentlyRm(path.join(extractTo, 'node_modules'))
  })
}
// True when any module in the tree rooted at `top` was delivered inside
// `top`'s own bundle, either directly or via a deeper child.
function anyBundled (top, pkg) {
  const node = pkg || top
  for (const child of node.children) {
    if (child.fromBundle === top) return true
    if (anyBundled(top, child)) return true
  }
  return false
}
// Stage every direct child that came out of this package's bundle.
// A child with a recorded error aborts the whole operation; at most 10
// children are staged concurrently.
function readBundled (pkg, staging, extractTo) {
  return BB.map(pkg.children, (child) => {
    if (!child.fromBundle) return
    if (child.error) throw child.error
    return stageBundledModule(pkg, child, staging, extractTo)
  }, {concurrency: 10})
}
// Move one bundled module from its parent's node_modules into its staging
// slot, recursing into the module's own bundled children first so they are
// taken out of the way before the module itself is finished.
function stageBundledModule (bundler, child, staging, parentPath) {
  const stageFrom = path.join(parentPath, 'node_modules', child.package.name)
  const stageTo = moduleStagingPath(staging, child)
  return BB.map(child.children, (grandchild) => {
    if (grandchild.error) throw grandchild.error
    return stageBundledModule(bundler, grandchild, staging, stageFrom)
  }).then(() => finishModule(bundler, child, stageTo, stageFrom))
}
// Finish staging one bundled module.  When this bundler owns the module it
// is moved to its staging slot; otherwise the stray copy is removed
// (silently tolerating the case where it is already gone).
function finishModule (bundler, child, stageTo, stageFrom) {
  if (child.fromBundle !== bundler) {
    return stat(stageFrom).then(() => gentlyRm(stageFrom), () => {})
  }
  return mkdirp(path.dirname(stageTo)).then(() => move(stageFrom, stageTo))
}

16
website/node_modules/npm/lib/install/action/fetch.js generated vendored Normal file
View File

@@ -0,0 +1,16 @@
'use strict'
const BB = require('bluebird')
const finished = BB.promisify(require('mississippi').finished)
const packageId = require('../../utils/package-id.js')
const pacote = require('pacote')
const pacoteOpts = require('../../config/pacote')
module.exports = fetch
// Warm the cache for `pkg` by streaming its tarball (and discarding the
// bytes).  Both success and failure are funneled through `next`.
function fetch (staging, pkg, log, next) {
  log.silly('fetch', packageId(pkg))
  const opts = pacoteOpts({integrity: pkg.package._integrity})
  const tarball = pacote.tarball.stream(pkg.package._requested, opts)
  return finished(tarball).then(() => next(), next)
}

106
website/node_modules/npm/lib/install/action/finalize.js generated vendored Normal file
View File

@@ -0,0 +1,106 @@
'use strict'
const path = require('path')
const fs = require('graceful-fs')
const Bluebird = require('bluebird')
const rimraf = Bluebird.promisify(require('rimraf'))
const mkdirp = Bluebird.promisify(require('mkdirp'))
const lstat = Bluebird.promisify(fs.lstat)
const readdir = Bluebird.promisify(fs.readdir)
const symlink = Bluebird.promisify(fs.symlink)
const gentlyRm = Bluebird.promisify(require('../../utils/gently-rm'))
const moduleStagingPath = require('../module-staging-path.js')
const move = require('move-concurrently')
const moveOpts = {fs: fs, Promise: Bluebird, maxConcurrency: 4}
const getRequested = require('../get-requested.js')
const log = require('npmlog')
const packageId = require('../../utils/package-id.js')
// Move a fully-staged package into its final node_modules location.
// Directory-type deps become relative symlinks ('junction' for Windows
// compatibility); everything else is moved out of staging, with any
// previous install parked at `delpath` so it can be restored on failure.
module.exports = function (staging, pkg, log) {
  log.silly('finalize', pkg.realpath)
  const extractedTo = moduleStagingPath(staging, pkg)
  const delpath = path.join(path.dirname(pkg.realpath), '.' + path.basename(pkg.realpath) + '.DELETE')
  let movedDestAway = false
  const requested = pkg.package._requested || getRequested(pkg)
  if (requested.type === 'directory') {
    const relative = path.relative(path.dirname(pkg.path), pkg.realpath)
    return makeParentPath(pkg.path)
      .then(() => symlink(relative, pkg.path, 'junction'))
      .catch((ex) => {
        // Something already lives at pkg.path — replace it with the link.
        return rimraf(pkg.path).then(() => symlink(relative, pkg.path, 'junction'))
      })
  } else {
    return makeParentPath(pkg.realpath)
      .then(moveStagingToDestination)
      .then(restoreOldNodeModules)
      .catch((err) => {
        // Roll back: if we displaced a previous install, put it back
        // before re-throwing the original failure.
        if (movedDestAway) {
          return rimraf(pkg.realpath).then(moveOldDestinationBack).then(() => {
            throw err
          })
        } else {
          throw err
        }
      })
      .then(() => rimraf(delpath))
  }
  function makeParentPath (dir) {
    return mkdirp(path.dirname(dir))
  }
  function moveStagingToDestination () {
    // Try the clean path first; if the destination exists, park it and retry.
    return destinationIsClear()
      .then(actuallyMoveStaging)
      .catch(() => moveOldDestinationAway().then(actuallyMoveStaging))
  }
  function destinationIsClear () {
    // Inverted lstat: resolves only when the path does NOT exist.
    return lstat(pkg.realpath).then(() => {
      throw new Error('destination exists')
    }, () => {})
  }
  function actuallyMoveStaging () {
    return move(extractedTo, pkg.realpath, moveOpts)
  }
  function moveOldDestinationAway () {
    return rimraf(delpath).then(() => {
      return move(pkg.realpath, delpath, moveOpts)
    }).then(() => { movedDestAway = true })
  }
  function moveOldDestinationBack () {
    return move(delpath, pkg.realpath, moveOpts).then(() => { movedDestAway = false })
  }
  function restoreOldNodeModules () {
    // The displaced install may contain already-installed subdeps; move
    // them under the new copy so they are not needlessly re-installed.
    if (!movedDestAway) return
    return readdir(path.join(delpath, 'node_modules')).catch(() => []).then((modules) => {
      if (!modules.length) return
      return mkdirp(path.join(pkg.realpath, 'node_modules')).then(() => Bluebird.map(modules, (file) => {
        const from = path.join(delpath, 'node_modules', file)
        const to = path.join(pkg.realpath, 'node_modules', file)
        return move(from, to, moveOpts)
      }))
    })
  }
}
// Undo a (possibly partial) finalize by deleting the module's path.
// Directory deps are symlinks and are left alone.  Failures here are
// downgraded to warnings rather than propagated.
module.exports.rollback = function (top, staging, pkg) {
  return Bluebird.try(() => {
    const requested = pkg.package._requested || getRequested(pkg)
    if (requested && requested.type === 'directory') return Promise.resolve()
    // strictly speaking rolling back a finalize should ONLY remove module that
    // was being finalized, not any of the things under it. But currently
    // those modules are guaranteed to be useless so we may as well remove them too.
    // When/if we separate `commit` step and can rollback to previous versions
    // of upgraded modules then we'll need to revisit this…
    return gentlyRm(pkg.path, false, top).catch((err) => {
      const detail = err.message ? err.message : err
      log.warn('rollback', `Rolling back ${packageId(pkg)} failed (this is probably harmless): ${detail}`)
    })
  })
}

View File

@@ -0,0 +1,17 @@
'use strict'
var path = require('path')
var npm = require('../../npm.js')
var Installer = require('../../install.js').Installer
var packageId = require('../../utils/package-id.js')
module.exports = function (staging, pkg, log, next) {
log.silly('global-install', packageId(pkg))
var globalRoot = path.resolve(npm.globalDir, '..')
npm.config.set('global', true)
var install = new Installer(globalRoot, false, [pkg.package.name + '@' + pkg.package._requested.fetchSpec])
install.link = false
install.run(function () {
npm.config.set('global', false)
next.apply(null, arguments)
})
}

View File

@@ -0,0 +1,8 @@
'use strict'
var npm = require('../../npm.js')
var packageId = require('../../utils/package-id.js')
module.exports = function (staging, pkg, log, next) {
log.silly('global-link', packageId(pkg))
npm.link(pkg.package.name, next)
}

View File

@@ -0,0 +1,8 @@
'use strict'
var lifecycle = require('../../utils/lifecycle.js')
var packageId = require('../../utils/package-id.js')
module.exports = function (staging, pkg, log, next) {
log.silly('install', packageId(pkg))
lifecycle(pkg.package, 'install', pkg.path, next)
}

96
website/node_modules/npm/lib/install/action/move.js generated vendored Normal file
View File

@@ -0,0 +1,96 @@
'use strict'
var fs = require('graceful-fs')
var path = require('path')
var chain = require('slide').chain
var iferr = require('iferr')
var rimraf = require('rimraf')
var mkdirp = require('mkdirp')
var rmStuff = require('../../unbuild.js').rmStuff
var lifecycle = require('../../utils/lifecycle.js')
var move = require('../../utils/move.js')
/*
Move a module from one point in the node_modules tree to another.
Do not disturb either the source or target location's node_modules
folders.
*/
module.exports = function (staging, pkg, log, next) {
log.silly('move', pkg.fromPath, pkg.path)
chain([
[lifecycle, pkg.package, 'preuninstall', pkg.fromPath, { failOk: true }],
[lifecycle, pkg.package, 'uninstall', pkg.fromPath, { failOk: true }],
[rmStuff, pkg.package, pkg.fromPath],
[lifecycle, pkg.package, 'postuninstall', pkg.fromPath, { failOk: true }],
[moveModuleOnly, pkg.fromPath, pkg.path, log],
[lifecycle, pkg.package, 'preinstall', pkg.path, { failOk: true }],
[removeEmptyParents, path.resolve(pkg.fromPath, '..')]
], next)
}
// Remove `pkgdir` if empty, then walk upward removing each newly-empty
// parent.  Calls `next` (with no error) at the first directory that can't
// be removed for a reason other than already being gone.
function removeEmptyParents (pkgdir, next) {
  fs.rmdir(pkgdir, function (er) {
    // FIXME: Make sure windows does what we want here
    var blocked = er && er.code !== 'ENOENT'
    if (blocked) return next()
    removeEmptyParents(path.resolve(pkgdir, '..'), next)
  })
}
// Move just the module directory from `from` to `to`, leaving both
// locations' node_modules folders where they are.  Implemented as a CPS
// pipeline; hard failures are funneled through `done`:
//   1. park the destination's node_modules out of the way
//   2. rm -rf the destination
//   3. ensure the destination's parent exists
//   4. park the source's node_modules, move the module, un-park them
//   5. restore the destination's node_modules (only if step 1 parked any)
function moveModuleOnly (from, to, log, done) {
  var fromModules = path.join(from, 'node_modules')
  var tempFromModules = from + '.node_modules'
  var toModules = path.join(to, 'node_modules')
  var tempToModules = to + '.node_modules'

  log.silly('move', 'move existing destination node_modules away', toModules)

  // If this park fails (e.g. destination has no node_modules) we still
  // proceed, but skip the restore step afterwards — both continuations go
  // through removeDestination, which branches on the error.
  move(toModules, tempToModules).then(removeDestination(done), removeDestination(done))

  function removeDestination (next) {
    return function (er) {
      log.silly('move', 'remove existing destination', to)
      if (er) {
        rimraf(to, iferr(next, makeDestination(next)))
      } else {
        rimraf(to, iferr(next, makeDestination(iferr(next, moveToModulesBack(next)))))
      }
    }
  }

  function moveToModulesBack (next) {
    return function () {
      log.silly('move', 'move existing destination node_modules back', toModules)
      move(tempToModules, toModules).then(next, done)
    }
  }

  function makeDestination (next) {
    return function () {
      log.silly('move', 'make sure destination parent exists', path.resolve(to, '..'))
      mkdirp(path.resolve(to, '..'), iferr(done, moveNodeModules(next)))
    }
  }

  function moveNodeModules (next) {
    return function () {
      log.silly('move', 'move source node_modules away', fromModules)
      // Same park-then-restore dance for the source's node_modules; when
      // the park fails there is nothing to put back (second continuation).
      move(fromModules, tempFromModules).then(doMove(moveNodeModulesBack(next)), doMove(next))
    }
  }

  function doMove (next) {
    return function () {
      log.silly('move', 'move module dir to final dest', from, to)
      move(from, to).then(next, done)
    }
  }

  function moveNodeModulesBack (next) {
    return function () {
      mkdirp(from, iferr(done, function () {
        log.silly('move', 'put source node_modules back', fromModules)
        move(tempFromModules, fromModules).then(next, done)
      }))
    }
  }
}

View File

@@ -0,0 +1,8 @@
'use strict'
var lifecycle = require('../../utils/lifecycle.js')
var packageId = require('../../utils/package-id.js')
module.exports = function (staging, pkg, log, next) {
log.silly('postinstall', packageId(pkg))
lifecycle(pkg.package, 'postinstall', pkg.path, next)
}

View File

@@ -0,0 +1,8 @@
'use strict'
var lifecycle = require('../../utils/lifecycle.js')
var packageId = require('../../utils/package-id.js')
module.exports = function (staging, pkg, log, next) {
log.silly('preinstall', packageId(pkg))
lifecycle(pkg.package, 'preinstall', pkg.path, next)
}

27
website/node_modules/npm/lib/install/action/prepare.js generated vendored Normal file
View File

@@ -0,0 +1,27 @@
'use strict'
var chain = require('slide').chain
var lifecycle = require('../../utils/lifecycle.js')
var packageId = require('../../utils/package-id.js')
var prepublishWarning = require('../../utils/warn-deprecated.js')('prepublish-on-install')
var moduleStagingPath = require('../module-staging-path.js')
module.exports = function (staging, pkg, log, next) {
log.silly('prepublish', packageId(pkg))
// TODO: for `npm@5`, change the behavior and remove this warning.
// see https://github.com/npm/npm/issues/10074 for details
if (pkg.package && pkg.package.scripts && pkg.package.scripts.prepublish) {
prepublishWarning([
'As of npm@5, `prepublish` scripts are deprecated.',
'Use `prepare` for build steps and `prepublishOnly` for upload-only.',
'See the deprecation note in `npm help scripts` for more information.'
])
}
var buildpath = moduleStagingPath(staging, pkg)
chain(
[
[lifecycle, pkg.package, 'prepublish', buildpath],
[lifecycle, pkg.package, 'prepare', buildpath]
],
next
)
}

View File

@@ -0,0 +1,45 @@
'use strict'
const Bluebird = require('bluebird')
const checkPlatform = Bluebird.promisify(require('npm-install-checks').checkPlatform)
const getRequested = require('../get-requested.js')
const npm = require('../../npm.js')
const path = require('path')
const readJson = Bluebird.promisify(require('read-package-json'))
const updatePackageJson = Bluebird.promisify(require('../update-package-json'))
// Re-read the staged package.json from disk and merge the in-memory
// metadata over it (skipping version/dependencies and empty values), then
// re-check platform compatibility and rewrite package.json for
// non-directory deps.  Read/merge errors are deliberately swallowed.
module.exports = function (staging, pkg, log) {
  log.silly('refresh-package-json', pkg.realpath)
  return readJson(path.join(pkg.path, 'package.json'), false).then((metadata) => {
    Object.keys(pkg.package).forEach(function (key) {
      // Keep the on-disk version/dependencies; only copy non-empty values.
      if (key !== 'version' && key !== 'dependencies' && !isEmpty(pkg.package[key])) {
        metadata[key] = pkg.package[key]
      }
    })
    // NOTE(review): _resolved appears to be backfilled from the lockfile
    // stub (fakeChild) when the manifest lacks it — confirm against deps.js.
    if (metadata._resolved == null && pkg.fakeChild) {
      metadata._resolved = pkg.fakeChild.resolved
    }
    // These two sneak in and it's awful
    delete metadata.readme
    delete metadata.readmeFilename
    pkg.package = metadata
    pkg.fakeChild = false
  }).catch(() => 'ignore').then(() => {
    return checkPlatform(pkg.package, npm.config.get('force'))
  }).then(() => {
    const requested = pkg.package._requested || getRequested(pkg)
    if (requested.type !== 'directory') {
      return updatePackageJson(pkg, pkg.path)
    }
  })
}
// Loose emptiness test used when merging package metadata: nullish values,
// zero-length arrays, and key-less objects count as empty; every other
// value (including 0 and '') does not.
function isEmpty (value) {
  if (value == null) return true
  if (Array.isArray(value)) return value.length === 0
  if (typeof value === 'object') return Object.keys(value).length === 0
  return false
}

85
website/node_modules/npm/lib/install/action/remove.js generated vendored Normal file
View File

@@ -0,0 +1,85 @@
'use strict'
var path = require('path')
var fs = require('graceful-fs')
var rimraf = require('rimraf')
var asyncMap = require('slide').asyncMap
var mkdirp = require('mkdirp')
var npm = require('../../npm.js')
var andIgnoreErrors = require('../and-ignore-errors.js')
var move = require('../../utils/move.js')
var isInside = require('path-is-inside')
var vacuum = require('fs-vacuum')
// This is weird because we want to remove the module but not it's node_modules folder
// allowing for this allows us to not worry about the order of operations
module.exports = function (staging, pkg, log, next) {
log.silly('remove', pkg.path)
if (pkg.target) {
removeLink(pkg, next)
} else {
removeDir(pkg, log, next)
}
}
// Delete a symlinked module and vacuum away any empty parent directories,
// stopping at the npm prefix (or at the link path itself when it lives
// outside the prefix).
function removeLink (pkg, next) {
  var base = isInside(pkg.path, npm.prefix) ? npm.prefix : pkg.path
  rimraf(pkg.path, function (err) {
    if (err) return next(err)
    vacuum(pkg.path, {base: base}, next)
  })
}
// Remove a module directory while preserving its node_modules: park the
// node_modules at a sibling .MODULES path, delete the module, move the
// surviving submodules back under a fresh node_modules, then clean up the
// parking spot and any now-empty parent directories.
function removeDir (pkg, log, next) {
  var modpath = path.join(path.dirname(pkg.path), '.' + path.basename(pkg.path) + '.MODULES')

  // Park node_modules first; whether or not that worked, delete the module
  // (both continuations land in unbuildPackage, which branches on the error).
  move(path.join(pkg.path, 'node_modules'), modpath).then(unbuildPackage, unbuildPackage)

  function unbuildPackage (moveEr) {
    // If parking failed there is nothing to move back — just remove empties.
    rimraf(pkg.path, moveEr ? andRemoveEmptyParents(pkg.path) : moveModulesBack)
  }
  function andRemoveEmptyParents (path) {
    return function (er) {
      if (er) return next(er)
      removeEmptyParents(pkg.path)
    }
  }
  function moveModulesBack () {
    fs.readdir(modpath, makeTarget)
  }
  function makeTarget (readdirEr, files) {
    if (readdirEr) return cleanup()
    if (!files.length) return cleanup()
    mkdirp(path.join(pkg.path, 'node_modules'), function (mkdirEr) { moveModules(mkdirEr, files) })
  }
  function moveModules (mkdirEr, files) {
    if (mkdirEr) return next(mkdirEr)
    asyncMap(files, function (file, done) {
      var from = path.join(modpath, file)
      var to = path.join(pkg.path, 'node_modules', file)
      // we ignore errors here, because they can legitimately happen, for instance,
      // bundled modules will be in both node_modules folders
      move(from, to).then(andIgnoreErrors(done), andIgnoreErrors(done))
    }, cleanup)
  }
  function cleanup () {
    rimraf(modpath, afterCleanup)
  }
  function afterCleanup (rimrafEr) {
    if (rimrafEr) log.warn('remove', rimrafEr)
    removeEmptyParents(path.resolve(pkg.path, '..'))
  }
  function removeEmptyParents (pkgdir) {
    fs.rmdir(pkgdir, function (er) {
      // FIXME: Make sure windows does what we want here
      if (er && er.code !== 'ENOENT') return next()
      removeEmptyParents(path.resolve(pkgdir, '..'))
    })
  }
}

16
website/node_modules/npm/lib/install/action/unbuild.js generated vendored Normal file
View File

@@ -0,0 +1,16 @@
'use strict'
var Bluebird = require('bluebird')
var lifecycle = Bluebird.promisify(require('../../utils/lifecycle.js'))
var packageId = require('../../utils/package-id.js')
var rmStuff = Bluebird.promisify(require('../../unbuild.js').rmStuff)
module.exports = function (staging, pkg, log) {
log.silly('unbuild', packageId(pkg))
return lifecycle(pkg.package, 'preuninstall', pkg.path, { failOk: true }).then(() => {
return lifecycle(pkg.package, 'uninstall', pkg.path, { failOk: true })
}).then(() => {
return rmStuff(pkg.package, pkg.path)
}).then(() => {
return lifecycle(pkg.package, 'postuninstall', pkg.path, { failOk: true })
})
}

192
website/node_modules/npm/lib/install/actions.js generated vendored Normal file
View File

@@ -0,0 +1,192 @@
'use strict'
const BB = require('bluebird')
const andAddParentToErrors = require('./and-add-parent-to-errors.js')
const failedDependency = require('./deps.js').failedDependency
const isInstallable = BB.promisify(require('./validate-args.js').isInstallable)
const moduleName = require('../utils/module-name.js')
const npm = require('../npm.js')
const reportOptionalFailure = require('./report-optional-failure.js')
const validate = require('aproba')
// Registry of per-package install actions, keyed by decomposed-command name.
const actions = {}
actions.fetch = require('./action/fetch.js')
actions.extract = require('./action/extract.js')
actions.build = require('./action/build.js')
actions.preinstall = require('./action/preinstall.js')
actions.install = require('./action/install.js')
actions.postinstall = require('./action/postinstall.js')
actions.prepare = require('./action/prepare.js')
actions.finalize = require('./action/finalize.js')
actions.remove = require('./action/remove.js')
actions.unbuild = require('./action/unbuild.js')
actions.move = require('./action/move.js')
actions['global-install'] = require('./action/global-install.js')
actions['global-link'] = require('./action/global-link.js')
actions['refresh-package-json'] = require('./action/refresh-package-json.js')

// FIXME: We wrap actions like three ways to sunday here.
// Rewrite this to only work one way.
//
// Each raw action is wrapped so that it:
//  * is skipped entirely for packages already marked failed
//  * registers the action's rollback/commit hooks on the package
//  * verifies the package is installable (once per package) before running
//  * finishes the log group on success, and on failure tags the error with
//    the parent package and downgrades optional-dep failures to reports.
Object.keys(actions).forEach(function (actionName) {
  var action = actions[actionName]
  actions[actionName] = (staging, pkg, log) => {
    validate('SOO', [staging, pkg, log])
    // refuse to run actions for failed packages
    if (pkg.failed) return BB.resolve()
    if (action.rollback) {
      // rollbacks run newest-first, hence unshift.
      if (!pkg.rollback) pkg.rollback = []
      pkg.rollback.unshift(action.rollback)
    }
    if (action.commit) {
      if (!pkg.commit) pkg.commit = []
      pkg.commit.push(action.commit)
    }
    let actionP
    if (pkg.knownInstallable) {
      actionP = runAction(action, staging, pkg, log)
    } else {
      actionP = isInstallable(pkg.package).then(() => {
        pkg.knownInstallable = true
        return runAction(action, staging, pkg, log)
      })
    }
    return actionP.then(() => {
      log.finish()
    }, (err) => {
      return BB.fromNode((cb) => {
        andAddParentToErrors(pkg.parent, cb)(err)
      }).catch((err) => {
        return handleOptionalDepErrors(pkg, err)
      })
    })
  }
  // init/teardown hooks default to resolved no-ops.
  actions[actionName].init = action.init || (() => BB.resolve())
  actions[actionName].teardown = action.teardown || (() => BB.resolve())
})
exports.actions = actions
// Invoke one action, accepting either callback-style or promise-style
// completion from the action implementation.
function runAction (action, staging, pkg, log) {
  return BB.fromNode((cb) => {
    const result = action(staging, pkg, log, cb)
    // Promise-returning actions ignore `cb`; bridge them manually.
    if (result && result.then) result.then(() => cb(), cb)
  })
}
// Mark `pkg` failed and cascade the failure to any of its requirements
// that are now only required by failed packages (unless user-requested).
function markAsFailed (pkg) {
  if (pkg.failed) return
  pkg.failed = true
  for (const req of pkg.requires) {
    const liveDependents = req.requiredBy.filter((dep) => !dep.failed)
    if (liveDependents.length === 0 && !req.userRequired) markAsFailed(req)
  }
}
// A package failed: propagate failure through the tree, then rethrow the
// error unless everything affected was optional — in that case just report.
function handleOptionalDepErrors (pkg, err) {
  markAsFailed(pkg)
  if (failedDependency(pkg)) throw err
  reportOptionalFailure(pkg, null, err)
}
exports.doOne = doOne
// Run a single decomposed action, bracketed by the action's init/teardown.
function doOne (cmd, staging, pkg, log, next) {
  validate('SSOOF', arguments)
  const prepped = prepareAction([cmd, pkg], staging, log)
  return withInit(actions[cmd], () => execAction(prepped)).nodeify(next)
}
exports.doParallel = doParallel
// Run every `type` action from `actionsToRun` in parallel, bounded by
// npm.limit.action, inside the action's init/teardown bracket.
function doParallel (type, staging, actionsToRun, log, next) {
  validate('SSAOF', arguments)
  const acts = []
  for (const todo of actionsToRun) {
    if (todo[0] === type) acts.push(prepareAction(todo, staging, log))
  }
  log.silly('doParallel', type + ' ' + acts.length)
  time(log)
  if (!acts.length) { return next() }
  return withInit(actions[type], () => {
    return BB.map(acts, execAction, {
      concurrency: npm.limit.action
    })
  }).nodeify((err) => {
    log.finish()
    timeEnd(log)
    next(err)
  })
}
exports.doSerial = doSerial
// Run matching actions strictly one at a time, in the given order.
function doSerial (type, staging, actionsToRun, log, next) {
  validate('SSAOF', arguments)
  log.silly('doSerial', '%s %d', type, actionsToRun.length)
  runSerial(type, staging, actionsToRun, log, next)
}
exports.doReverseSerial = doReverseSerial
// Like doSerial, but in reverse order (for teardown-style phases).
function doReverseSerial (type, staging, actionsToRun, log, next) {
  validate('SSAOF', arguments)
  log.silly('doReverseSerial', '%s %d', type, actionsToRun.length)
  runSerial(type, staging, actionsToRun.slice().reverse(), log, next)
}
// Shared driver for doSerial/doReverseSerial: filter to `type`, prepare
// each action, then execute them sequentially inside init/teardown.
function runSerial (type, staging, actionsToRun, log, next) {
  const acts = []
  for (const todo of actionsToRun) {
    if (todo[0] === type) acts.push(prepareAction(todo, staging, log))
  }
  time(log)
  if (!acts.length) { return next() }
  return withInit(actions[type], () => BB.each(acts, execAction)).nodeify((err) => {
    log.finish()
    timeEnd(log)
    next(err)
  })
}
// Start a process-level perf timer for this log group's action phase.
function time (log) {
  process.emit('time', `action:${log.name}`)
}
// End the perf timer started by time() for this log group's action phase.
function timeEnd (log) {
  process.emit('timeEnd', `action:${log.name}`)
}
// Bracket `body` between an action's init() and teardown(); teardown runs
// even when body fails (Bluebird disposer semantics).
function withInit (action, body) {
  const resource = action.init().disposer(() => action.teardown())
  return BB.using(resource, body)
}
// Turn a decomposed ['cmd', pkg] pair into the argument tuple execAction
// expects, creating a fresh log group per action.
function prepareAction (action, staging, log) {
  validate('ASO', arguments)
  validate('SO', action)
  const [cmd, pkg] = action
  if (!actions[cmd]) throw new Error('Unknown decomposed command "' + cmd + '" (is it new?)')
  return [actions[cmd], staging, pkg, log.newGroup(cmd + ':' + moduleName(pkg))]
}
// Apply a prepared [fn, ...args] tuple and return its result.
function execAction (todo) {
  const [fn, ...args] = todo
  return fn.apply(null, args)
}

View File

@@ -0,0 +1,13 @@
'use strict'
var validate = require('aproba')
module.exports = function (parent, cb) {
validate('F', [cb])
return function (er) {
if (!er) return cb.apply(null, arguments)
if (er instanceof Error && parent && parent.package && parent.package.name) {
er.parent = parent.package.name
}
cb(er)
}
}

View File

@@ -0,0 +1,16 @@
'use strict'
var validate = require('aproba')
module.exports = function (tracker, cb) {
validate('OF', [tracker, cb])
return function () {
tracker.finish()
cb.apply(null, arguments)
}
}
// Immediately finish `tracker` and invoke `cb` with any extra arguments
// that follow the (tracker, cb) pair.
module.exports.now = function (tracker, cb) {
  validate('OF', [tracker, cb])
  tracker.finish()
  var extra = Array.prototype.slice.call(arguments, 2)
  cb.apply(null, extra)
}

View File

@@ -0,0 +1,9 @@
'use strict'
module.exports = function (cb) {
return function () {
var args = Array.prototype.slice.call(arguments, 1)
if (args.length) args.unshift(null)
return cb.apply(null, args)
}
}

282
website/node_modules/npm/lib/install/audit.js generated vendored Normal file
View File

@@ -0,0 +1,282 @@
'use strict'
exports.generate = generate
exports.generateFromInstall = generateFromInstall
exports.submitForInstallReport = submitForInstallReport
exports.submitForFullReport = submitForFullReport
exports.printInstallReport = printInstallReport
exports.printParseableReport = printParseableReport
exports.printFullReport = printFullReport
const Bluebird = require('bluebird')
const auditReport = require('npm-audit-report')
const treeToShrinkwrap = require('../shrinkwrap.js').treeToShrinkwrap
const packageId = require('../utils/package-id.js')
const output = require('../utils/output.js')
const npm = require('../npm.js')
const qw = require('qw')
const registryFetch = require('npm-registry-fetch')
const zlib = require('zlib')
const gzip = Bluebird.promisify(zlib.gzip)
const log = require('npmlog')
const perf = require('../utils/perf.js')
const url = require('url')
const npa = require('npm-package-arg')
const uuid = require('uuid')
const ssri = require('ssri')
const cloneDeep = require('lodash.clonedeep')
const pacoteOpts = require('../config/pacote.js')
// used when scrubbing module names/specifiers
const runId = uuid.v4()
// POST the quick-audit payload to the main registry and fire-and-forget
// the same payload at any configured scoped registries.  Resolves with the
// parsed audit report from the main registry.
function submitForInstallReport (auditData) {
  const cfg = npm.config // avoid the no-dynamic-lookups test
  const scopedRegistries = cfg.keys.filter(_ => /:registry$/.test(_)).map(_ => cfg.get(_))
  perf.emit('time', 'audit compress')
  // TODO: registryFetch will be adding native support for `Content-Encoding: gzip` at which point
  // we'll pass in something like `gzip: true` and not need to JSON stringify, gzip or headers.
  return gzip(JSON.stringify(auditData)).then(body => {
    perf.emit('timeEnd', 'audit compress')
    // Fixed: message read "…of Nbytes" (missing space); now consistent
    // with submitForFullReport.
    log.info('audit', 'Submitting payload of ' + body.length + ' bytes')
    scopedRegistries.forEach(reg => {
      // we don't care about the response so destroy the stream if we can, or leave it flowing
      // so it can eventually finish and clean up after itself
      fetchAudit(url.resolve(reg, '/-/npm/v1/security/audits/quick'))
        .then(_ => {
          _.body.on('error', () => {})
          if (_.body.destroy) {
            _.body.destroy()
          } else {
            _.body.resume()
          }
        }, _ => {})
    })
    perf.emit('time', 'audit submit')
    return fetchAudit('/-/npm/v1/security/audits/quick', body).then(response => {
      perf.emit('timeEnd', 'audit submit')
      perf.emit('time', 'audit body')
      return response.json()
    }).then(result => {
      perf.emit('timeEnd', 'audit body')
      return result
    })
  })
}
// POST the full audit payload to the registry and resolve with the parsed
// report, stamped with this run's scrubbing id so scrubbed names can be
// correlated within the report.
function submitForFullReport (auditData) {
  perf.emit('time', 'audit compress')
  // TODO: registryFetch will be adding native support for `Content-Encoding: gzip` at which point
  // we'll pass in something like `gzip: true` and not need to JSON stringify, gzip or headers.
  return gzip(JSON.stringify(auditData)).then(body => {
    perf.emit('timeEnd', 'audit compress')
    log.info('audit', 'Submitting payload of ' + body.length + ' bytes')
    perf.emit('time', 'audit submit')
    const submitted = fetchAudit('/-/npm/v1/security/audits', body)
    return submitted.then(response => {
      perf.emit('timeEnd', 'audit submit')
      perf.emit('time', 'audit body')
      return response.json()
    }).then(result => {
      perf.emit('timeEnd', 'audit body')
      result.runId = runId
      return result
    })
  })
}
// Issue a gzip'd JSON POST to the registry audit endpoint at `href`.
function fetchAudit (href, body) {
  const opts = pacoteOpts()
  const fetchOpts = {
    method: 'POST',
    headers: { 'content-encoding': 'gzip', 'content-type': 'application/json' },
    config: npm.config,
    npmSession: opts.npmSession,
    projectScope: npm.projectScope,
    log: log,
    body: body
  }
  return registryFetch(href, fetchOpts)
}
// Render the post-install audit summary to the terminal.
function printInstallReport (auditResult) {
  const reporterOpts = {
    reporter: 'install',
    withColor: npm.color,
    withUnicode: npm.config.get('unicode')
  }
  return auditReport(auditResult, reporterOpts).then(result => output(result.report))
}
// Render the full audit report — JSON when --json is set, otherwise the
// detailed human-readable format.
function printFullReport (auditResult) {
  const reporterOpts = {
    log: output,
    reporter: npm.config.get('json') ? 'json' : 'detail',
    withColor: npm.color,
    withUnicode: npm.config.get('unicode')
  }
  return auditReport(auditResult, reporterOpts).then(result => output(result.report))
}
// Render the audit report in the machine-parseable (tab-separated) format.
function printParseableReport (auditResult) {
  const reporterOpts = {
    log: output,
    reporter: 'parseable',
    withColor: npm.color,
    withUnicode: npm.config.get('unicode')
  }
  return auditReport(auditResult, reporterOpts).then(result => output(result.report))
}
// Build a scrubbed audit payload from a shrinkwrap tree plus the requested
// install/remove specs, then attach environment metadata.
function generate (shrinkwrap, requires, diffs, install, remove) {
  const payload = cloneDeep(shrinkwrap)
  delete payload.lockfileVersion
  payload.requires = scrubRequires(requires)
  scrubDeps(payload.dependencies)
  // sw.diffs = diffs || {}
  payload.install = (install || []).map(scrubArg)
  payload.remove = (remove || []).map(scrubArg)
  return generateMetadata().then((metadata) => {
    payload.metadata = metadata
    return payload
  })
}
// Dependency keys whose values are replaced with scrubbed digests.
const scrubKeys = qw`version`
// Dependency keys removed outright (they can embed URLs or local paths).
const deleteKeys = qw`from resolved`

// Recursively scrub a shrinkwrap `dependencies` map in place: private
// names are digest-renamed, sensitive keys are scrubbed or dropped, and
// nested requires/dependencies get the same treatment.
function scrubDeps (deps) {
  if (!deps) return
  // First pass: rename entries whose name or version points outside the
  // public registry.  (The key list is re-read in the second pass, so
  // mutating `deps` here is safe.)
  Object.keys(deps).forEach(name => {
    if (!shouldScrubName(name) && !shouldScrubSpec(name, deps[name].version)) return
    const value = deps[name]
    delete deps[name]
    deps[scrub(name)] = value
  })
  // Second pass: scrub/drop per-entry keys and recurse into children.
  Object.keys(deps).forEach(name => {
    for (let toScrub of scrubKeys) {
      if (!deps[name][toScrub]) continue
      deps[name][toScrub] = scrubSpec(name, deps[name][toScrub])
    }
    for (let toDelete of deleteKeys) delete deps[name][toDelete]
    scrubRequires(deps[name].requires)
    scrubDeps(deps[name].dependencies)
  })
}
// Scrub a requires map in place and return it.  Names and specs pointing
// outside the public registry are replaced with digests; public specs are
// still passed through scrubSpec (a no-op for registry specs).
function scrubRequires (reqs) {
  if (!reqs) return reqs
  Object.keys(reqs).forEach(name => {
    const spec = reqs[name]
    const cleanSpec = scrubSpec(name, spec)
    if (shouldScrubName(name) || shouldScrubSpec(name, spec)) {
      delete reqs[name]
      reqs[scrub(name)] = cleanSpec
    } else {
      reqs[name] = cleanSpec
    }
  })
  return reqs
}
// Return the scope portion of a scoped package name ('@scope/x' → '@scope');
// unscoped names yield undefined.
function getScope (name) {
  if (name[0] !== '@') return undefined
  return name.slice(0, name.indexOf('/'))
}
// A package name needs scrubbing when its scope is bound to a scoped
// registry in the user's config (i.e. it is likely private).
function shouldScrubName (name) {
  const scope = getScope(name)
  const cfg = npm.config // avoid the no-dynamic-lookups test
  if (!scope) return false
  return Boolean(cfg.get(scope + ':registry'))
}
// Specs that don't resolve to a registry (git, file, remote, …) may leak
// private information and must be scrubbed.
function shouldScrubSpec (name, spec) {
  return !npa.resolve(name, spec).registry
}
// Scrub a single install/remove argument ("name@spec") for the payload.
function scrubArg (arg) {
  const req = npa(arg)
  const needsScrub = shouldScrubName(req.name) || shouldScrubSpec(req.name, req.rawSpec)
  const name = needsScrub ? scrubName(req.name) : req.name
  const spec = scrubSpec(req.name, req.rawSpec)
  return name + '@' + spec
}
// Digest-replace a package name when its scope maps to a scoped registry.
function scrubName (name) {
  if (!shouldScrubName(name)) return name
  return scrub(name)
}
// Scrub a spec while preserving enough of its shape (a protocol-ish
// prefix per type) for the audit service to classify it.  Registry specs
// pass through unchanged.
function scrubSpec (name, spec) {
  const req = npa.resolve(name, spec)
  if (req.registry) return spec
  switch (req.type) {
    case 'git':
      return 'git+ssh://' + scrub(spec)
    case 'remote':
      return 'https://' + scrub(spec)
    case 'directory':
      return 'file:' + scrub(spec)
    case 'file':
      return 'file:' + scrub(spec) + '.tar'
    default:
      return scrub(spec)
  }
}
module.exports.scrub = scrub
// One-way digest of `value`, salted with the per-run id so values can be
// correlated within a single report but not across runs.
function scrub (value, rid) {
  const salted = (rid || runId) + ' ' + value
  return ssri.fromData(salted, {algorithms: ['sha256']}).hexDigest()
}
// Environment metadata attached to every audit payload.
function generateMetadata () {
  return Promise.resolve({
    npm_version: npm.version,
    node_version: process.version,
    platform: process.platform,
    node_env: process.env.NODE_ENV
  })
}
/*
const head = path.resolve(npm.prefix, '.git/HEAD')
return readFile(head, 'utf8').then((head) => {
if (!head.match(/^ref: /)) {
meta.commit_hash = head.trim()
return
}
const headFile = head.replace(/^ref: /, '').trim()
meta.branch = headFile.replace(/^refs[/]heads[/]/, '')
return readFile(path.resolve(npm.prefix, '.git', headFile), 'utf8')
}).then((commitHash) => {
meta.commit_hash = commitHash.trim()
const proc = spawn('git', qw`diff --quiet --exit-code package.json package-lock.json`, {cwd: npm.prefix, stdio: 'ignore'})
return new Promise((resolve, reject) => {
proc.once('error', reject)
proc.on('exit', (code, signal) => {
if (signal == null) meta.state = code === 0 ? 'clean' : 'dirty'
resolve()
})
})
}).then(() => meta, () => meta)
*/
// Build an audit payload from an install run: the top-level requires map
// (preferring the root package.json's declared spec, falling back to the
// installed version), plus scrubbed diff/install/remove summaries.
function generateFromInstall (tree, diffs, install, remove) {
  // Robustness fix: tolerate a root package with no dependencies /
  // devDependencies sections (previously indexing them threw a TypeError).
  const rootDeps = tree.package.dependencies || {}
  const rootDevDeps = tree.package.devDependencies || {}
  const requires = {}
  tree.requires.forEach((pkg) => {
    const name = pkg.package.name
    requires[name] = rootDeps[name] || rootDevDeps[name] || pkg.package.version
  })
  const auditInstall = (install || []).filter((a) => a.name).map(packageId)
  const auditRemove = (remove || []).filter((a) => a.name).map(packageId)
  const auditDiffs = {}
  diffs.forEach((action) => {
    const mutation = action[0]
    const child = action[1]
    // Only add/update/remove mutations are interesting to the audit service.
    if (mutation !== 'add' && mutation !== 'update' && mutation !== 'remove') return
    if (!auditDiffs[mutation]) auditDiffs[mutation] = []
    if (mutation === 'add') {
      auditDiffs[mutation].push({location: child.location})
    } else if (mutation === 'update') {
      auditDiffs[mutation].push({location: child.location, previous: packageId(child.oldPkg)})
    } else if (mutation === 'remove') {
      auditDiffs[mutation].push({previous: packageId(child)})
    }
  })
  return generate(treeToShrinkwrap(tree), requires, auditDiffs, auditInstall, auditRemove)
}

View File

@@ -0,0 +1,69 @@
'use strict'
var path = require('path')
var log = require('npmlog')
var validate = require('aproba')
var uniq = require('lodash.uniq')
var asyncMap = require('slide').asyncMap
var npm = require('../npm.js')
var exists = require('./exists.js')
var writable = require('./writable.js')
// Verify that the planned install/update/remove/move actions won't hit
// filesystem permission errors.  Failures are logged as warnings and abort
// the run with the first error unless --force is set.
module.exports = function (actions, next) {
  validate('AF', arguments)
  var errors = []
  asyncMap(actions, function (action, done) {
    var cmd = action[0]
    var pkg = action[1]
    switch (cmd) {
      case 'add':
        // New module: only the containing directory needs to be writable
        // (or creatable — see hasAnyWriteAccess).
        hasAnyWriteAccess(path.resolve(pkg.path, '..'), errors, done)
        break
      case 'update':
      case 'remove':
        // Need write access to the module itself and then to its parent.
        hasWriteAccess(pkg.path, errors, andHasWriteAccess(path.resolve(pkg.path, '..'), errors, done))
        break
      case 'move':
        // Need to create at the destination and remove from the source.
        hasAnyWriteAccess(pkg.path, errors, andHasWriteAccess(path.resolve(pkg.fromPath, '..'), errors, done))
        break
      default:
        done()
    }
  }, function () {
    if (!errors.length) return next()
    // Deduplicate so each inaccessible path is warned about only once.
    uniq(errors.map(function (er) { return 'Missing write access to ' + er.path })).forEach(function (er) {
      log.warn('checkPermissions', er)
    })
    npm.config.get('force') ? next() : next(errors[0])
  })
}
// Return a callback that, when invoked, checks write access on `dir` and
// then continues with `done` — used to chain two permission checks.
// Any arguments passed to the returned callback are deliberately ignored.
function andHasWriteAccess (dir, errors, done) {
  validate('SAF', arguments)
  return function () {
    hasWriteAccess(dir, errors, done)
  }
}
// Walk upward from `dir` to the nearest directory that actually exists and
// check write access there.  (The target directory may not exist yet; what
// matters is whether we could create it.)
function hasAnyWriteAccess (dir, errors, done) {
  validate('SAF', arguments)
  findNearestDir()
  function findNearestDir () {
    var nextDir = path.resolve(dir, '..')
    // exists() calls back with an error when the path is missing, so a
    // truthy argument here means "dir does not exist".
    exists(dir, function (dirDoesntExist) {
      if (!dirDoesntExist || nextDir === dir) {
        // Found an existing directory (or hit the root): check it.
        return hasWriteAccess(dir, errors, done)
      } else {
        dir = nextDir
        findNearestDir()
      }
    })
  }
}
// Check that `dir` is writable; any failure is recorded in `errors` but the
// check sequence always continues via `done`.
function hasWriteAccess (dir, errors, done) {
  validate('SAF', arguments)
  writable(dir, function (writeErr) {
    if (writeErr) errors.push(writeErr)
    done()
  })
}

30
website/node_modules/npm/lib/install/copy-tree.js generated vendored Normal file
View File

@@ -0,0 +1,30 @@
'use strict'
var createNode = require('./node.js').create
// Deep-copy a module tree.  The cache object shared across the recursion
// preserves node identity and keeps cycles from recursing forever.
module.exports = function (tree) {
  return copyTree(tree, {})
}
// Clone one tree node (keyed by path in `cache` to handle shared nodes and
// cycles), then clone its children/requires/requiredBy lists.
function copyTree (tree, cache) {
  var cached = cache[tree.path]
  if (cached) return cached
  var newTree = createNode(Object.assign({}, tree))
  // Cache before recursing so cyclic references resolve to this copy.
  cache[tree.path] = newTree
  copyModuleList(newTree, 'children', cache)
  newTree.children.forEach(function (kid) {
    kid.parent = newTree
  })
  copyModuleList(newTree, 'requires', cache)
  copyModuleList(newTree, 'requiredBy', cache)
  return newTree
}
// Replace tree[key] with a list of copies of its entries, dropping any
// entry whose copy came back falsy.  Missing lists become empty arrays.
function copyModuleList (tree, key, cache) {
  var source = tree[key] || []
  tree[key] = source.map(function (child) {
    return copyTree(child, cache)
  }).filter(function (copy) {
    return copy
  })
}

View File

@@ -0,0 +1,79 @@
'use strict'
var validate = require('aproba')
var npm = require('../npm.js')
// Decompose high-level tree differences ('add', 'update', 'move', 'remove')
// into the low-level installer steps (fetch, extract, build, ...) that carry
// them out, appending to the shared `decomposed` list.
module.exports = function (differences, decomposed, next) {
  validate('AAF', arguments)
  differences.forEach((action) => {
    var cmd = action[0]
    var pkg = action[1]
    switch (cmd) {
      case 'add':
        addSteps(decomposed, pkg)
        break
      case 'update':
        updateSteps(decomposed, pkg)
        break
      case 'move':
        moveSteps(decomposed, pkg)
        break
      case 'remove':
        removeSteps(decomposed, pkg)
        break
      default:
        // Unknown commands are passed through as a single step.
        defaultSteps(decomposed, cmd, pkg)
    }
  })
  next()
}
// Append a step, skipping exact duplicates (same action name AND same
// package object).
function addAction (decomposed, action, pkg) {
  var isDuplicate = decomposed.some(function (step) {
    return step[0] === action && step[1] === pkg
  })
  if (!isDuplicate) decomposed.push([action, pkg])
}
// Steps needed to add a module to the tree.
function addSteps (decomposed, pkg) {
  var bundle = pkg.fromBundle
  if (bundle) {
    // make sure our source module exists to extract ourselves from
    // if we're installing our source module anyway, the duplication
    // of these steps will be elided by `addAction` automatically
    addAction(decomposed, 'fetch', bundle)
    addAction(decomposed, 'extract', bundle)
  } else if (!pkg.isLink) {
    addAction(decomposed, 'fetch', pkg)
    addAction(decomposed, 'extract', pkg)
  }
  // Bundled modules arrive pre-built unless a rebuild was requested.
  if (!bundle || npm.config.get('rebuild-bundle')) {
    var buildPhases = ['preinstall', 'build', 'install', 'postinstall']
    buildPhases.forEach(function (phase) {
      addAction(decomposed, phase, pkg)
    })
  }
  if (!bundle || !pkg.isLink) {
    addAction(decomposed, 'finalize', pkg)
  }
  addAction(decomposed, 'refresh-package-json', pkg)
}
// An update is decomposed as a removal of the old version followed by a
// full add of the new one.
function updateSteps (decomposed, pkg) {
  removeSteps(decomposed, pkg.oldPkg)
  addSteps(decomposed, pkg)
}
// Steps needed to delete a module: unbuild (lifecycle teardown, bin
// removal) before removing the files themselves.
function removeSteps (decomposed, pkg) {
  var phases = ['unbuild', 'remove']
  phases.forEach(function (phase) {
    addAction(decomposed, phase, pkg)
  })
}
// Steps needed to relocate a module within the tree: move the files, then
// rebuild and refresh its metadata at the new location.
function moveSteps (decomposed, pkg) {
  var phases = ['move', 'build', 'install', 'postinstall', 'refresh-package-json']
  phases.forEach(function (phase) {
    addAction(decomposed, phase, pkg)
  })
}
// Commands with no decomposition are recorded as a single step.
function defaultSteps (decomposed, cmd, pkg) {
  addAction(decomposed, cmd, pkg)
}

816
website/node_modules/npm/lib/install/deps.js generated vendored Normal file
View File

@@ -0,0 +1,816 @@
'use strict'
const BB = require('bluebird')
var fs = require('fs')
var assert = require('assert')
var path = require('path')
var semver = require('semver')
var asyncMap = require('slide').asyncMap
var chain = require('slide').chain
var iferr = require('iferr')
var npa = require('npm-package-arg')
var validate = require('aproba')
var dezalgo = require('dezalgo')
var fetchPackageMetadata = require('../fetch-package-metadata.js')
var andAddParentToErrors = require('./and-add-parent-to-errors.js')
var addBundled = require('../fetch-package-metadata.js').addBundled
var readShrinkwrap = require('./read-shrinkwrap.js')
var inflateShrinkwrap = require('./inflate-shrinkwrap.js')
var inflateBundled = require('./inflate-bundled.js')
var andFinishTracker = require('./and-finish-tracker.js')
var npm = require('../npm.js')
var flatNameFromTree = require('./flatten-tree.js').flatNameFromTree
var createChild = require('./node.js').create
var resetMetadata = require('./node.js').reset
var isInstallable = require('./validate-args.js').isInstallable
var packageId = require('../utils/package-id.js')
var moduleName = require('../utils/module-name.js')
var isDevDep = require('./is-dev-dep.js')
var isProdDep = require('./is-prod-dep.js')
var reportOptionalFailure = require('./report-optional-failure.js')
var getSaveType = require('./save.js').getSaveType
var unixFormatPath = require('../utils/unix-format-path.js')
var isExtraneous = require('./is-extraneous.js')
var isRegistry = require('../utils/is-registry.js')
var hasModernMeta = require('./has-modern-meta.js')
// The export functions in this module mutate a dependency tree, adding
// items to it.
// npm-package-arg specifier types that resolve against the registry.
var registryTypes = { range: true, version: true }
// Decide whether an already-resolved `child` node satisfies the
// npm-package-arg specifier `requested`.  Registry specifiers are matched
// with semver; everything else is matched by comparing raw/save specs.
function doesChildVersionMatch (child, requested, requestor) {
  // Locked deps without their own requires are trusted as-is.
  if (child.fromShrinkwrap && !child.hasRequiresFromLock) return true
  // ranges of * ALWAYS count as a match, because when downloading we allow
  // prereleases to match * if there are ONLY prereleases
  if (requested.type === 'range' && requested.fetchSpec === '*') return true
  if (requested.type === 'directory') {
    // Directory deps are installed as symlinks; match on the link target.
    if (!child.isLink) return false
    return path.relative(child.realpath, requested.fetchSpec) === ''
  }
  if (requested.type === 'git' && child.fromShrinkwrap) {
    const fromSw = child.package._from ? npa(child.package._from) : child.fromShrinkwrap
    fromSw.name = requested.name // we're only checking specifiers here
    if (fromSw.toString() === requested.toString()) return true
  }
  if (!registryTypes[requested.type]) {
    var childReq = child.package._requested
    if (childReq) {
      if (childReq.rawSpec === requested.rawSpec) return true
      if (childReq.type === requested.type && childReq.saveSpec === requested.saveSpec) return true
    }
    // If _requested didn't exist OR if it didn't match then we'll try using
    // _from. We pass it through npa to normalize the specifier.
    // This can happen when installing from an `npm-shrinkwrap.json` where `_requested` will
    // be the tarball URL from `resolved` and thus can't match what's in the `package.json`.
    // In those cases _from will be preserved and we can compare that to ensure that they
    // really came from the same sources.
    // You'll see this scenario happen with at least tags and git dependencies.
    // Some buggy clients will write spaces into the module name part of a _from.
    if (child.package._from) {
      var fromReq = npa.resolve(moduleName(child), child.package._from.replace(new RegExp('^\\s*' + moduleName(child) + '\\s*@'), ''))
      if (fromReq.rawSpec === requested.rawSpec) return true
      if (fromReq.type === requested.type && fromReq.saveSpec && fromReq.saveSpec === requested.saveSpec) return true
    }
    return false
  }
  try {
    // Registry spec: plain semver match (loose, allowing odd old versions).
    return semver.satisfies(child.package.version, requested.fetchSpec, true)
  } catch (e) {
    return false
  }
}
// Resolve `name@spec` relative to the requesting package's directory
// unless an explicit base directory is supplied.
function childDependencySpecifier (tree, name, spec, where) {
  var base = where || packageRelativePath(tree)
  return npa.resolve(name, spec, base)
}
exports.computeMetadata = computeMetadata
// Recompute the logical metadata (location, requires/requiredBy links,
// missing dep maps) for `tree` and all of its non-removed children.
// `seen` guards against revisiting nodes in cyclic trees.
function computeMetadata (tree, seen) {
  if (!seen) seen = new Set()
  if (!tree || seen.has(tree)) return
  seen.add(tree)
  if (tree.parent == null) {
    // Root node: wipe previously computed metadata before rebuilding.
    resetMetadata(tree)
    tree.isTop = true
  }
  tree.location = flatNameFromTree(tree)
  // Try to satisfy name@spec from modules already in the tree; on success
  // the existing module is wired in and true is returned.
  function findChild (name, spec, kind) {
    try {
      var req = childDependencySpecifier(tree, name, spec)
    } catch (err) {
      return
    }
    var child = findRequirement(tree, req.name, req)
    if (child) {
      resolveWithExistingModule(child, tree)
      return true
    }
  }
  const deps = tree.package.dependencies || {}
  const reqs = tree.swRequires || {}
  for (let name of Object.keys(deps)) {
    if (findChild(name, deps[name])) continue
    // Fall back to the lockfile's recorded requirement before declaring
    // the dependency missing.
    if (name in reqs && findChild(name, reqs[name])) continue
    tree.missingDeps[name] = deps[name]
  }
  if (tree.isTop) {
    // devDependencies are only considered at the top level.
    const devDeps = tree.package.devDependencies || {}
    for (let name of Object.keys(devDeps)) {
      if (findChild(name, devDeps[name])) continue
      tree.missingDevDeps[name] = devDeps[name]
    }
  }
  tree.children.filter((child) => !child.removed).forEach((child) => computeMetadata(child, seen))
  return tree
}
// Classify whether `child` satisfies one of `tree`'s declared dependencies.
// Returns {isDep, isProdDep, isDevDep}; isProdDep/isDevDep carry the
// matching specifier (or false).
function isDep (tree, child) {
  var name = moduleName(child)
  var prodVer = isProdDep(tree, name)
  var devVer = isDevDep(tree, name)
  try {
    var prodSpec = childDependencySpecifier(tree, name, prodVer)
  } catch (err) {
    // Unparseable specifier: treat as a dep but not prod or dev.
    return {isDep: true, isProdDep: false, isDevDep: false}
  }
  var matches
  if (prodSpec) matches = doesChildVersionMatch(child, prodSpec, tree)
  if (matches) return {isDep: true, isProdDep: prodSpec, isDevDep: false}
  // Same spec in both fields and prod didn't match: dev can't match either.
  if (devVer === prodVer) return {isDep: child.fromShrinkwrap, isProdDep: false, isDevDep: false}
  try {
    var devSpec = childDependencySpecifier(tree, name, devVer)
    return {isDep: doesChildVersionMatch(child, devSpec, tree) || child.fromShrinkwrap, isProdDep: false, isDevDep: devSpec}
  } catch (err) {
    return {isDep: child.fromShrinkwrap, isProdDep: false, isDevDep: false}
  }
}
// Wire up the logical requires/requiredBy links between `tree` and `child`
// when child actually satisfies one of tree's dependencies.  Returns
// whether the link was made.
function addRequiredDep (tree, child) {
  var dep = isDep(tree, child)
  if (!dep.isDep) return false
  replaceModuleByPath(child, 'requiredBy', tree)
  replaceModuleByName(tree, 'requires', child)
  // The dependency is no longer missing once linked.
  if (dep.isProdDep && tree.missingDeps) delete tree.missingDeps[moduleName(child)]
  if (dep.isDevDep && tree.missingDevDeps) delete tree.missingDevDeps[moduleName(child)]
  return true
}
exports.removeObsoleteDep = removeObsoleteDep
// Mark `child` as removed and detach it from both the physical tree
// (parent/children) and the logical tree (requires/requiredBy), cascading
// to any module that becomes extraneous as a result.
function removeObsoleteDep (child, log) {
  if (child.removed) return
  child.removed = true
  if (log) {
    log.silly('removeObsoleteDep', 'removing ' + packageId(child) +
      ' from the tree as its been replaced by a newer version or is no longer required')
  }
  // remove from physical tree
  if (child.parent) {
    child.parent.children = child.parent.children.filter(function (pchild) { return pchild !== child })
  }
  // remove from logical tree
  var requires = child.requires || []
  requires.forEach(function (requirement) {
    requirement.requiredBy = requirement.requiredBy.filter(function (reqBy) { return reqBy !== child })
    // we don't just check requirement.requires because that doesn't account
    // for circular deps. isExtraneous does.
    if (isExtraneous(requirement)) removeObsoleteDep(requirement, log)
  })
}
// Base directory against which a node's dependency specifiers should be
// resolved.
function packageRelativePath (tree) {
  if (!tree) return ''
  var requested = tree.package._requested || {}
  // Local (file/directory) deps resolve relative to their source location.
  if (requested.type === 'directory' || requested.type === 'file') {
    return requested.fetchSpec
  }
  // Symlinked modules resolve from their real location unless node was told
  // to preserve symlinks.
  if ((tree.isLink || tree.isInLink) && !preserveSymlinks()) {
    return tree.realpath
  }
  return tree.path
}
// Look up the spec declared for `name` on `tree`, checking dependencies
// first and then devDependencies; undefined when not declared.
function matchingDep (tree, name) {
  if (!tree || !tree.package) return
  var pkg = tree.package
  var fromDeps = pkg.dependencies && pkg.dependencies[name]
  if (fromDeps) return fromDeps
  var fromDev = pkg.devDependencies && pkg.devDependencies[name]
  if (fromDev) return fromDev
}
// Fetch package metadata for every install argument.  Bare names that look
// like local paths get special handling: if a package.json exists at that
// path we install it as a file dep; otherwise we fall back to a declared
// spec from the tree, or the registry.
exports.getAllMetadata = function (args, tree, where, next) {
  asyncMap(args, function (arg, done) {
    let spec
    try {
      spec = npa(arg)
    } catch (e) {
      return done(e)
    }
    if (spec.type !== 'file' && spec.type !== 'directory' && (spec.name == null || spec.rawSpec === '')) {
      // NOTE(review): arg is used as a directory path here even though it
      // was not parsed as a file/directory spec — the stat failing is the
      // common case and routes us to the registry lookup below.
      return fs.stat(path.join(arg, 'package.json'), (err) => {
        if (err) {
          // Not a local package dir: prefer the spec already declared in
          // package.json, falling back to the parsed arg itself.
          var version = matchingDep(tree, spec.name)
          if (version) {
            try {
              return fetchPackageMetadata(npa.resolve(spec.name, version), where, done)
            } catch (e) {
              return done(e)
            }
          } else {
            return fetchPackageMetadata(spec, where, done)
          }
        } else {
          // A package.json exists at that path: treat the arg as a file dep.
          try {
            return fetchPackageMetadata(npa('file:' + arg), where, done)
          } catch (e) {
            return done(e)
          }
        }
      })
    } else {
      return fetchPackageMetadata(spec, where, done)
    }
  }, next)
}
// Add a list of args to tree's top level dependencies
exports.loadRequestedDeps = function (args, tree, saveToDependencies, log, next) {
  validate('AOOF', [args, tree, log, next])
  asyncMap(args, function (pkg, done) {
    var depLoaded = andAddParentToErrors(tree, done)
    resolveWithNewModule(pkg, tree, log.newGroup('loadRequestedDeps'), iferr(depLoaded, function (child, tracker) {
      validate('OO', arguments)
      if (npm.config.get('global')) {
        child.isGlobal = true
      }
      var childName = moduleName(child)
      child.saveSpec = computeVersionSpec(tree, child)
      child.userRequired = true
      child.save = getSaveType(tree, child)
      const types = ['dependencies', 'devDependencies', 'optionalDependencies']
      if (child.save) {
        tree.package[child.save][childName] = child.saveSpec
        // Astute readers might notice that this exact same code exists in
        // save.js under a different guise. That code is responsible for deps
        // being removed from the final written `package.json`. The removal in
        // this function is specifically to prevent "installed as both X and Y"
        // warnings when moving an existing dep between different dep fields.
        //
        // Or, try it by removing this loop, and do `npm i -P x && npm i -D x`
        for (let saveType of types) {
          if (child.save !== saveType) {
            delete tree.package[saveType][childName]
          }
        }
        // Optional deps are also recorded under dependencies per npm's
        // package.json convention.
        if (child.save === 'optionalDependencies') tree.package.dependencies[childName] = child.saveSpec
      }
      // For things the user asked to install, that aren't a dependency (or
      // won't be when we're done), flag it as "depending" on the user
      // themselves, so we don't remove it as a dep that no longer exists
      var childIsDep = addRequiredDep(tree, child)
      if (!childIsDep) child.userRequired = true
      depLoaded(null, child, tracker)
    }))
  }, andForEachChild(loadDeps, andFinishTracker(log, next)))
}
// A value counts as "empty" when it is null, undefined or the empty string.
function isNotEmpty (value) {
  if (value === null || value === undefined) return false
  return value !== ''
}
exports.computeVersionSpec = computeVersionSpec
// Work out the specifier to record in package.json for `child`: a
// save-prefixed semver range for registry deps, a relative file: path for
// local deps, and the original save/raw spec otherwise.
function computeVersionSpec (tree, child) {
  validate('OO', arguments)
  var requested
  var childReq = child.package._requested
  if (child.isLink) {
    // Symlinked modules are saved as file: paths to their real location.
    requested = npa.resolve(child.package.name, 'file:' + child.realpath, getTop(tree).path)
  } else if (childReq && (isNotEmpty(childReq.saveSpec) || (isNotEmpty(childReq.rawSpec) && isNotEmpty(childReq.fetchSpec)))) {
    requested = child.package._requested
  } else if (child.package._from) {
    requested = npa(child.package._from, tree.path)
  } else {
    requested = npa.resolve(child.package.name, child.package.version)
  }
  if (isRegistry(requested)) {
    var version = child.package.version
    var rangeDescriptor = ''
    // Versions below 0.1.0 historically get saved exactly; otherwise apply
    // the configured save prefix (default "^") unless --save-exact.
    if (semver.valid(version, true) &&
        semver.gte(version, '0.1.0', true) &&
        !npm.config.get('save-exact')) {
      rangeDescriptor = npm.config.get('save-prefix')
    }
    return rangeDescriptor + version
  } else if (requested.type === 'directory' || requested.type === 'file') {
    // Local deps are saved relative to the project root, in unix form.
    return 'file:' + unixFormatPath(path.relative(getTop(tree).path, requested.fetchSpec))
  } else {
    return requested.saveSpec || requested.rawSpec
  }
}
// Build a predicate (for Array#filter and friends) matching tree nodes by
// module name.
function moduleNameMatches (name) {
  return function (child) { return moduleName(child) === name }
}
// while this implementation does not require async calling, doing so
// gives this a consistent interface with loadDeps et al
// Remove the named packages from the tree and (when saving) from the
// relevant package.json dependency fields, unlinking them from the logical
// requires/requiredBy graph.
exports.removeDeps = function (args, tree, saveToDependencies, next) {
  validate('AOSF|AOZF', [args, tree, saveToDependencies, next])
  for (let pkg of args) {
    var pkgName = moduleName(pkg)
    var toRemove = tree.children.filter(moduleNameMatches(pkgName))
    // A stub node stands in for modules not actually present on disk.
    var pkgToRemove = toRemove[0] || createChild({package: {name: pkgName}})
    var saveType = getSaveType(tree, pkg) || 'dependencies'
    if (tree.isTop && saveToDependencies) {
      pkgToRemove.save = saveType
    }
    if (tree.package[saveType][pkgName]) {
      delete tree.package[saveType][pkgName]
      // Optional deps are mirrored under dependencies; drop that copy too.
      if (saveType === 'optionalDependencies' && tree.package.dependencies[pkgName]) {
        delete tree.package.dependencies[pkgName]
      }
    }
    replaceModuleByPath(tree, 'removedChildren', pkgToRemove)
    for (let parent of pkgToRemove.requiredBy) {
      parent.requires = parent.requires.filter((child) => child !== pkgToRemove)
    }
    pkgToRemove.requiredBy = pkgToRemove.requiredBy.filter((parent) => parent !== tree)
    flagAsRemoving(pkgToRemove)
  }
  next()
}
// Mark a node and (transitively) everything it requires as being removed.
// `seen` guards against infinite recursion on circular dependencies.
function flagAsRemoving (toRemove, seen) {
  seen = seen || new Set()
  if (seen.has(toRemove)) return
  seen.add(toRemove)
  toRemove.removing = true
  for (var required of toRemove.requires) {
    flagAsRemoving(required, seen)
  }
}
// Remove the named extraneous packages (and anything they alone required)
// from the tree.
exports.removeExtraneous = function (args, tree, next) {
  for (let pkg of args) {
    var pkgName = moduleName(pkg)
    var toRemove = tree.children.filter(moduleNameMatches(pkgName))
    if (toRemove.length) {
      removeObsoleteDep(toRemove[0])
    }
  }
  next()
}
// Build an asyncMap completion callback that runs `load` on every resolved
// child (in deterministic name order) and then calls `next`.
function andForEachChild (load, next) {
  validate('F', [next])
  next = dezalgo(next)
  return function (er, children, logs) {
    // when children is empty, logs won't be passed in at all (asyncMap is weird)
    // so shortcircuit before arg validation
    if (!er && (!children || children.length === 0)) return next()
    validate('EAA', arguments)
    if (er) return next(er)
    assert(children.length === logs.length)
    var cmds = []
    for (var ii = 0; ii < children.length; ++ii) {
      cmds.push([load, children[ii], logs[ii]])
    }
    // Sort by module name so install order (and output) is deterministic.
    var sortedCmds = cmds.sort(function installOrder (aa, bb) {
      return moduleName(aa[1]).localeCompare(moduleName(bb[1]))
    })
    chain(sortedCmds, next)
  }
}
// Whether a failure to install dependency `name` of `tree` may be ignored:
// true for optional deps, and for dev/prod deps that the current
// --only/--also/--production configuration excludes from this run.
function isDepOptional (tree, name, pkg) {
  if (pkg.package && pkg.package._optional) return true
  const optDeps = tree.package.optionalDependencies
  if (optDeps && optDeps[name] != null) return true
  const devDeps = tree.package.devDependencies
  if (devDeps && devDeps[name] != null) {
    // A dev dep is only "required" when this run actually installs dev deps.
    const includeDev = npm.config.get('dev') ||
      (!/^prod(uction)?$/.test(npm.config.get('only')) && !npm.config.get('production')) ||
      /^dev(elopment)?$/.test(npm.config.get('only')) ||
      /^dev(elopment)?$/.test(npm.config.get('also'))
    return !includeDev
  }
  const prodDeps = tree.package.dependencies
  if (prodDeps && prodDeps[name] != null) {
    // Likewise, prod deps are skippable in an --only=dev run.
    const includeProd = !/^dev(elopment)?$/.test(npm.config.get('only'))
    return !includeProd
  }
  return false
}
exports.failedDependency = failedDependency
// Record that dependency `name` of `tree` failed and decide whether that
// failure is fatal: it is unless every path from a root down to this node
// goes through an optional/skippable dependency edge.
function failedDependency (tree, name, pkg) {
  if (name) {
    if (isDepOptional(tree, name, pkg || {})) {
      return false
    }
  }
  tree.failed = true
  if (tree.isTop) return true
  if (tree.userRequired) return true
  if (!tree.requiredBy) return false
  // Propagate upward: fatal if any dependent considers it fatal.
  let anyFailed = false
  for (var ii = 0; ii < tree.requiredBy.length; ++ii) {
    var requireParent = tree.requiredBy[ii]
    if (failedDependency(requireParent, moduleName(tree), tree)) {
      anyFailed = true
    }
  }
  return anyFailed
}
// Wrap a dep-loaded callback so that failures of optional (or otherwise
// skippable) dependencies are reported and swallowed instead of aborting.
function andHandleOptionalErrors (log, tree, name, done) {
  validate('OOSF', arguments)
  return function (er, child, childLog) {
    if (!er) validate('OO', [child, childLog])
    if (!er) return done(er, child, childLog)
    var isFatal = failedDependency(tree, name)
    if (er && !isFatal) {
      // Non-fatal: log the optional failure and continue without the child.
      reportOptionalFailure(tree, name, er)
      return done()
    } else {
      return done(er, child, childLog)
    }
  }
}
exports.prefetchDeps = prefetchDeps
// Warm the metadata cache: concurrently fetch metadata for the whole
// registry-dependency graph reachable from `deps`, without mutating the
// tree.  Fetch errors are deliberately ignored (this is purely a prefetch).
function prefetchDeps (tree, deps, log, next) {
  validate('OOOF', arguments)
  var skipOptional = !npm.config.get('optional')
  var seen = new Set()
  const finished = andFinishTracker(log, next)
  const fpm = BB.promisify(fetchPackageMetadata)
  resolveBranchDeps(tree.package, deps).then(
    () => finished(), finished
  )
  function resolveBranchDeps (pkg, deps) {
    return BB.resolve(null).then(() => {
      // Only registry deps not already fetched or present in the tree.
      var allDependencies = Object.keys(deps).map((dep) => {
        return npa.resolve(dep, deps[dep])
      }).filter((dep) => {
        return isRegistry(dep) &&
          !seen.has(dep.toString()) &&
          !findRequirement(tree, dep.name, dep)
      })
      if (skipOptional) {
        var optDeps = pkg.optionalDependencies || {}
        allDependencies = allDependencies.filter((dep) => !optDeps[dep.name])
      }
      return BB.map(allDependencies, (dep) => {
        seen.add(dep.toString())
        return fpm(dep, '', {tracker: log.newItem('fetchMetadata')}).then(
          (pkg) => {
            // Recurse into the fetched package's own dependencies.
            return pkg && pkg.dependencies && resolveBranchDeps(pkg, pkg.dependencies)
          },
          () => null
        )
      })
    })
  }
}
// Load any missing dependencies in the given tree
exports.loadDeps = loadDeps
// Load (resolve and add to the tree) every missing dependency of `tree`,
// recursing into each resolved child via andForEachChild.
function loadDeps (tree, log, next) {
  validate('OOF', arguments)
  // Skip already-loaded, removed, or descendants of failed nodes.
  if (tree.loaded || (tree.parent && tree.parent.failed) || tree.removed) return andFinishTracker.now(log, next)
  if (tree.parent) tree.loaded = true
  if (!tree.package.dependencies) tree.package.dependencies = {}
  asyncMap(Object.keys(tree.package.dependencies), function (dep, done) {
    var version = tree.package.dependencies[dep]
    addDependency(dep, version, tree, log.newGroup('loadDep:' + dep), andHandleOptionalErrors(log, tree, dep, done))
  }, andForEachChild(loadDeps, andFinishTracker(log, next)))
}
// Load development dependencies into the given tree
// Load the development dependencies of `tree` (used at the top level).
exports.loadDevDeps = function (tree, log, next) {
  validate('OOF', arguments)
  if (!tree.package.devDependencies) return andFinishTracker.now(log, next)
  asyncMap(Object.keys(tree.package.devDependencies), function (dep, done) {
    // things defined as both dev dependencies and regular dependencies are treated
    // as the former
    if (tree.package.dependencies[dep]) return done()
    var logGroup = log.newGroup('loadDevDep:' + dep)
    addDependency(dep, tree.package.devDependencies[dep], tree, logGroup, andHandleOptionalErrors(log, tree, dep, done))
  }, andForEachChild(loadDeps, andFinishTracker(log, next)))
}
// Walk the existing (on-disk) tree and wire any not-yet-loaded children
// into the logical graph, without resolving their own dependencies.
var loadExtraneous = exports.loadExtraneous = function (tree, log, next) {
  var seen = new Set()
  function loadExtraneous (tree) {
    if (seen.has(tree)) return
    seen.add(tree)
    for (var child of tree.children) {
      if (child.loaded) continue
      resolveWithExistingModule(child, tree)
      loadExtraneous(child)
    }
  }
  loadExtraneous(tree)
  log.finish()
  next()
}
// Like loadExtraneous, but also resolves the dependencies of each newly
// wired-in child.
exports.loadExtraneous.andResolveDeps = function (tree, log, next) {
  validate('OOF', arguments)
  // For canonicalized trees (eg from shrinkwrap) we don't want to bother
  // resolving the dependencies of extraneous deps.
  if (tree.loaded) return loadExtraneous(tree, log, next)
  asyncMap(tree.children.filter(function (child) { return !child.loaded }), function (child, done) {
    resolveWithExistingModule(child, tree)
    done(null, child, log)
  }, andForEachChild(loadDeps, andFinishTracker(log, next)))
}
// Satisfy dependency name@versionSpec for `tree`: reuse a compatible module
// already in the tree when possible, otherwise fetch metadata and add a new
// one.  Calls back with (er, child, log).
function addDependency (name, versionSpec, tree, log, done) {
  validate('SSOOF', arguments)
  var next = andAddParentToErrors(tree, done)
  try {
    var req = childDependencySpecifier(tree, name, versionSpec)
    // The lockfile may record a different (more precise) requirement.
    if (tree.swRequires && tree.swRequires[name]) {
      var swReq = childDependencySpecifier(tree, name, tree.swRequires[name], tree.package._where)
    }
  } catch (err) {
    return done(err)
  }
  var child = findRequirement(tree, name, req)
  if (!child && swReq) child = findRequirement(tree, name, swReq)
  if (hasModernMeta(child)) {
    // Reuse the existing module; inflate its shrinkwrap if not done yet.
    resolveWithExistingModule(child, tree)
    if (child.package._shrinkwrap === undefined) {
      readShrinkwrap.andInflate(child, function (er) { next(er, child, log) })
    } else {
      next(null, child, log)
    }
  } else {
    if (child) {
      // Found a matching module with stale/legacy metadata: re-resolve by
      // its exact version and replace it.
      if (req.registry) {
        req = childDependencySpecifier(tree, name, child.package.version)
      }
      if (child.fromBundle) reportBundleOverride(child, log)
      removeObsoleteDep(child, log)
    }
    fetchPackageMetadata(req, packageRelativePath(tree), {tracker: log.newItem('fetchMetadata')}, iferr(next, function (pkg) {
      resolveWithNewModule(pkg, tree, log, next)
    }))
  }
}
// Follow parent links up to the root node.  The visited set guards against
// cyclic parent chains, so this always terminates.
function getTop (pkg) {
  var visited = new Set()
  var node = pkg
  while (node.parent && !visited.has(node.parent)) {
    node = node.parent
    visited.add(node)
  }
  return node
}
// Record (once per top-level tree) a warning that a bundled dependency was
// replaced with a non-bundled version, and log the specific replacement.
function reportBundleOverride (child, log) {
  const code = 'EBUNDLEOVERRIDE'
  const top = getTop(child.fromBundle)
  const bundlerId = packageId(child.fromBundle)
  // Only attach one EBUNDLEOVERRIDE warning per tree.
  if (!top.warnings.some((w) => {
    return w.code === code
  })) {
    const err = new Error(`${bundlerId} had bundled packages that do not match the required version(s). They have been replaced with non-bundled versions.`)
    err.code = code
    top.warnings.push(err)
  }
  if (log) log.verbose('bundle', `${code}: Replacing ${bundlerId}'s bundled version of ${moduleName(child)} with ${packageId(child)}.`)
}
// Satisfy a dependency of `tree` using a module already in the tree:
// link it in logically and record it as a phantom child where needed.
function resolveWithExistingModule (child, tree) {
  validate('OO', arguments)
  addRequiredDep(tree, child)
  if (tree.parent && child.parent !== tree) updatePhantomChildren(tree.parent, child)
}
// Record `child` as a "phantom" on every ancestor strictly between
// `current` and the child's real parent, so lookups from those levels can
// still see it.
var updatePhantomChildren = exports.updatePhantomChildren = function (current, child) {
  validate('OO', arguments)
  while (current && current !== child.parent) {
    if (!current.phantomChildren) current.phantomChildren = {}
    current.phantomChildren[moduleName(child)] = child
    current = current.parent
  }
}
exports._replaceModuleByPath = replaceModuleByPath
// Insert/replace `child` in obj[key], matching existing entries by their
// on-disk path.
function replaceModuleByPath (obj, key, child) {
  return replaceModule(obj, key, child, function (replacing, child) {
    return replacing.path === child.path
  })
}
exports._replaceModuleByName = replaceModuleByName
// Insert/replace `child` in obj[key], matching existing entries by module
// name.
function replaceModuleByName (obj, key, child) {
  var childName = moduleName(child)
  return replaceModule(obj, key, child, function (replacing, child) {
    return moduleName(replacing) === childName
  })
}
// Insert `child` into the list obj[key], replacing the first entry that
// `matchBy` says is equivalent (appending when there is no match).
// Returns the replaced entry, or undefined.
function replaceModule (obj, key, child, matchBy) {
  validate('OSOF', arguments)
  if (!obj[key]) obj[key] = []
  // we replace children with a new array object instead of mutating it
  // because mutating it results in weird failure states.
  // I would very much like to know _why_ this is. =/
  var updated = obj[key].slice()
  var idx = 0
  while (idx < updated.length && !matchBy(updated[idx], child)) ++idx
  var removed = updated.splice(idx, 1, child)
  obj[key] = updated
  return removed[0]
}
// Satisfy a dependency of `tree` by creating a new node from fetched
// metadata `pkg`: pick the highest install location, create the child,
// inflate its bundled deps and shrinkwrap, and wire it into the graph.
// Calls back with (er, child, log).
function resolveWithNewModule (pkg, tree, log, next) {
  validate('OOOF', arguments)
  log.silly('resolveWithNewModule', packageId(pkg), 'checking installable status')
  return isInstallable(pkg, (err) => {
    let installable = !err
    addBundled(pkg, (bundleErr) => {
      // Hoist as high in the tree as conflicts allow.
      var parent = earliestInstallable(tree, tree, pkg, log) || tree
      var isLink = pkg._requested.type === 'directory'
      var child = createChild({
        package: pkg,
        parent: parent,
        path: path.join(parent.isLink ? parent.realpath : parent.path, 'node_modules', pkg.name),
        realpath: isLink ? pkg._requested.fetchSpec : path.join(parent.realpath, 'node_modules', pkg.name),
        children: pkg._bundled || [],
        isLink: isLink,
        isInLink: parent.isLink,
        knownInstallable: installable
      })
      if (!installable || bundleErr) child.failed = true
      delete pkg._bundled
      var hasBundled = child.children.length
      // Displace any same-named sibling at the chosen location.
      var replaced = replaceModuleByName(parent, 'children', child)
      if (replaced) {
        if (replaced.fromBundle) reportBundleOverride(replaced, log)
        removeObsoleteDep(replaced)
      }
      addRequiredDep(tree, child)
      child.location = flatNameFromTree(child)
      if (tree.parent && parent !== tree) updatePhantomChildren(tree.parent, child)
      if (hasBundled) {
        inflateBundled(child, child, child.children)
      }
      if (pkg._shrinkwrap && pkg._shrinkwrap.dependencies) {
        return inflateShrinkwrap(child, pkg._shrinkwrap, (swErr) => {
          if (swErr) child.failed = true
          // Report the earliest error encountered, but still hand back the child.
          next(err || bundleErr || swErr, child, log)
        })
      }
      next(err || bundleErr, child, log)
    })
  })
}
// Check each peerDependency of `tree` against what's installed at or above
// its parent; `onInvalid(tree, name, version)` is called for each unmet one.
var validatePeerDeps = exports.validatePeerDeps = function (tree, onInvalid) {
  if (!tree.package.peerDependencies) return
  Object.keys(tree.package.peerDependencies).forEach(function (pkgname) {
    var version = tree.package.peerDependencies[pkgname]
    try {
      var spec = npa.resolve(pkgname, version)
    } catch (e) {}
    // An unparseable specifier also counts as unmet (spec stays undefined).
    var match = spec && findRequirement(tree.parent || tree, pkgname, spec)
    if (!match) onInvalid(tree, pkgname, version)
  })
}
// Validate peer dependencies over the whole tree (public entry point).
exports.validateAllPeerDeps = function (tree, onInvalid) {
  validateAllPeerDeps(tree, onInvalid, new Set())
}
// Recursive worker; `seen` guards against cycles.
function validateAllPeerDeps (tree, onInvalid, seen) {
  validate('OFO', arguments)
  if (seen.has(tree)) return
  seen.add(tree)
  validatePeerDeps(tree, onInvalid)
  tree.children.forEach(function (child) { validateAllPeerDeps(child, onInvalid, seen) })
}
// Determine if a module requirement is already met by the tree at or above
// our current location in the tree.
var findRequirement = exports.findRequirement = function (tree, name, requested, requestor) {
  validate('OSO', [tree, name, requested])
  if (!requestor) requestor = tree
  var nameMatch = function (child) {
    return moduleName(child) === name && child.parent && !child.removed
  }
  var versionMatch = function (child) {
    return doesChildVersionMatch(child, requested, requestor)
  }
  if (nameMatch(tree)) {
    // this *is* the module, but it doesn't match the version, so a
    // new copy will have to be installed
    return versionMatch(tree) ? tree : null
  }
  var matches = tree.children.filter(nameMatch)
  if (matches.length) {
    matches = matches.filter(versionMatch)
    // the module exists as a dependent, but the version doesn't match, so
    // a new copy will have to be installed above here
    if (matches.length) return matches[0]
    return null
  }
  if (tree.isTop) return null
  // Don't look past a symlink boundary (unless symlinks are preserved).
  if (!preserveSymlinks() && /^[.][.][\\/]/.test(path.relative(tree.parent.realpath, tree.realpath))) return null
  return findRequirement(tree.parent, name, requested, requestor)
}
// Mirror node's interpretation of NODE_PRESERVE_SYMLINKS: unset, empty, or
// any "off" value ('false'/'no'/'0') disables the behavior.
function preserveSymlinks () {
  var raw = process.env.NODE_PRESERVE_SYMLINKS
  if (raw === undefined || raw === null || raw === '') return false
  return raw !== 'false' && raw !== 'no' && raw !== '0'
}
// Find the highest level in the tree that we can install this module in.
// If the module isn't installed above us yet, that'd be the very top.
// If it is, then it's the level below where its installed.
var earliestInstallable = exports.earliestInstallable = function (requiredBy, tree, pkg, log) {
  validate('OOOO', arguments)
  function undeletedModuleMatches (child) {
    return !child.removed && moduleName(child) === pkg.name
  }
  const undeletedMatches = tree.children.filter(undeletedModuleMatches)
  if (undeletedMatches.length) {
    // if there's a conflict with another child AT THE SAME level then we're replacing it, so
    // mark it as removed and continue with resolution normally.
    if (tree === requiredBy) {
      undeletedMatches.forEach((pkg) => {
        if (pkg.fromBundle) reportBundleOverride(pkg, log)
        removeObsoleteDep(pkg, log)
      })
    } else {
      return null
    }
  }
  // If any of the children of this tree have conflicting
  // binaries then we need to decline to install this package here.
  var binaryMatches = pkg.bin && tree.children.some(function (child) {
    if (child.removed || !child.package.bin) return false
    return Object.keys(child.package.bin).some(function (bin) {
      return pkg.bin[bin]
    })
  })
  if (binaryMatches) return null
  // if this tree location requested the same module then we KNOW it
  // isn't compatible because if it were findRequirement would have
  // found that version.
  var deps = tree.package.dependencies || {}
  if (!tree.removed && requiredBy !== tree && deps[pkg.name]) {
    return null
  }
  // At the top, an incompatible devDependency on the same name also blocks
  // hoisting here.
  var devDeps = tree.package.devDependencies || {}
  if (tree.isTop && devDeps[pkg.name]) {
    var requested = childDependencySpecifier(tree, pkg.name, devDeps[pkg.name])
    if (!doesChildVersionMatch({package: pkg}, requested, tree)) {
      return null
    }
  }
  // A phantom child of the same name means some deeper module already
  // resolves this name through here; don't shadow it.
  if (tree.phantomChildren && tree.phantomChildren[pkg.name]) return null
  if (tree.isTop) return tree
  if (tree.isGlobal) return tree
  // --global-style / --legacy-bundling deliberately limit hoisting.
  if (npm.config.get('global-style') && tree.parent.isTop) return tree
  if (npm.config.get('legacy-bundling')) return tree
  // Don't hoist across a symlink boundary (unless symlinks are preserved).
  if (!preserveSymlinks() && /^[.][.][\\/]/.test(path.relative(tree.parent.realpath, tree.realpath))) return tree
  return (earliestInstallable(requiredBy, tree.parent, pkg, log) || tree)
}

260
website/node_modules/npm/lib/install/diff-trees.js generated vendored Normal file
View File

@@ -0,0 +1,260 @@
'use strict'
var npm = require('../npm.js')
var validate = require('aproba')
var npa = require('npm-package-arg')
var flattenTree = require('./flatten-tree.js')
var isOnlyDev = require('./is-only-dev.js')
var log = require('npmlog')
var path = require('path')
var ssri = require('ssri')
var moduleName = require('../utils/module-name.js')
var isOnlyOptional = require('./is-only-optional.js')
// we don't use get-requested because we're operating on files on disk, and
// we don't want to extrapolate from what _should_ be there.
// Recover the original request for a package from whatever metadata its
// package.json carries: `_requested` (preferred), `_resolved`, or the
// legacy `_from` field (the latter two parsed via npm-package-arg).
function pkgRequested (pkg) {
  if (pkg._requested) return pkg._requested
  if (pkg._resolved) return npa(pkg._resolved)
  if (pkg._from) return npa(pkg._from)
}
// True when the requested spec does not come from a registry:
// git, local file/directory, or remote tarball.
function nonRegistrySource (requested) {
  return Boolean(
    fromGit(requested) ||
    fromLocal(requested) ||
    fromRemote(requested)
  )
}
// True when the requested spec points at a remote tarball URL.
// Fix: return an explicit boolean in all cases, consistent with the
// sibling predicates fromLocal/fromGit (previously fell through and
// returned undefined for non-remote specs).
function fromRemote (requested) {
  if (requested.type === 'remote') return true
  return false
}
// True when the requested spec refers to something on the local disk.
// 'local' is an npm@3-era type that meant "file".
function fromLocal (requested) {
  const localTypes = ['file', 'directory', 'local']
  return localTypes.indexOf(requested.type) !== -1
}
// True when the requested spec is a git source ('git' URL or a
// 'hosted' shortcut like user/repo on GitHub et al.).
function fromGit (requested) {
  return requested.type === 'hosted' || requested.type === 'git'
}
// Extract any integrity (SRI) information recorded on a package's
// metadata: registry-provided `dist.integrity`, pacote's `_integrity`,
// or a legacy `_shasum` (converted to an sha1 SRI string).
// Returns undefined when nothing usable is present or parsing fails.
function pkgIntegrity (pkg) {
  try {
    let sri
    if (pkg.dist && pkg.dist.integrity) {
      sri = pkg.dist.integrity
    } else if (pkg._integrity) {
      sri = pkg._integrity
    } else if (pkg._shasum) {
      sri = ssri.fromHex(pkg._shasum, 'sha1').toString()
    }
    if (!sri) return
    const parsed = ssri.parse(sri)
    if (Object.keys(parsed).length === 0) return
    return parsed
  } catch (ex) {
    // malformed integrity data is treated as "no integrity"
  }
}
// Return true when the two parsed SRI objects share at least one
// matching digest for any algorithm they have in common.
//
// Bug fix: the original returned the result of comparing only the FIRST
// pair of hashes for the first shared algorithm, so a genuine match
// later in either hash list was reported as a mismatch. Scan every pair
// and succeed on any match instead.
function sriMatch (aa, bb) {
  if (!aa || !bb) return false
  for (let algo of Object.keys(aa)) {
    if (!bb[algo]) continue
    for (let aaHash of aa[algo]) {
      for (let bbHash of bb[algo]) {
        if (aaHash.digest === bbHash.digest) return true
      }
    }
  }
  return false
}
// Decide whether two nodes occupying the same path in the old and new
// trees represent the same physical package (so no install action is
// needed). Checks progressively weaker evidence: link status, metadata
// identity, integrity hashes, link targets, git resolution, and finally
// registry version equality.
function pkgAreEquiv (aa, bb) {
  // coming in we know they share a path…
  // if one is inside a link and the other is not, then they are not equivalent
  // this happens when we're replacing a linked dep with a non-linked version
  if (aa.isInLink !== bb.isInLink) return false
  // if they share package metadata _identity_, they're the same thing
  if (aa.package === bb.package) return true
  // if they share integrity information, they're the same thing
  var aaIntegrity = pkgIntegrity(aa.package)
  var bbIntegrity = pkgIntegrity(bb.package)
  if (aaIntegrity || bbIntegrity) return sriMatch(aaIntegrity, bbIntegrity)
  // if they're links and they share the same target, they're the same thing
  if (aa.isLink && bb.isLink) return aa.realpath === bb.realpath
  // if we can't determine both their sources then we have no way to know
  // if they're the same thing, so we have to assume they aren't
  var aaReq = pkgRequested(aa.package)
  var bbReq = pkgRequested(bb.package)
  if (!aaReq || !bbReq) return false
  if (fromGit(aaReq) && fromGit(bbReq)) {
    // if both are git and share a _resolved specifier (one with the
    // comittish replaced by a commit hash) then they're the same
    return aa.package._resolved && bb.package._resolved &&
      aa.package._resolved === bb.package._resolved
  }
  // we have to give up trying to find matches for non-registry sources at this point…
  if (nonRegistrySource(aaReq) || nonRegistrySource(bbReq)) return false
  // finally, if they ARE a registry source then version matching counts
  return aa.package.version === bb.package.version
}
// Append every element of `bb` to `aa` in place (mutates `aa`).
function pushAll (aa, bb) {
  for (const item of bb) {
    aa.push(item)
  }
}
// Compute the list of actions (add/remove/move/update) needed to turn
// `oldTree` into `newTree`, appending them to `differences` in an order
// that is safe to execute (dependencies before dependents). Signals
// completion through `log.finish()` and the `next` callback.
module.exports = function (oldTree, newTree, differences, log, next) {
  validate('OOAOF', arguments)
  pushAll(differences, sortActions(diffTrees(oldTree, newTree)))
  log.finish()
  next()
}
// True for a node that is neither the top level, nor directly requested
// by the user, nor an already-existing (extraneous) install.
function isNotTopOrExtraneous (node) {
  if (node.isTop) return false
  if (node.userRequired) return false
  if (node.existing) return false
  return true
}
// Order a raw list of [action, node] pairs so that each module's action
// comes after the actions for the modules that require it, with top
// level dependencies sorted to the end of the list.
var sortActions = module.exports.sortActions = function (differences) {
  // index actions by the node's location for lookup during the DFS below
  var actions = {}
  differences.forEach(function (action) {
    var child = action[1]
    actions[child.location] = action
  })
  var sorted = []
  var added = {}
  var sortedlocs = Object.keys(actions).sort(sortByLocation)
  // We're going to sort the actions taken on top level dependencies first, before
  // considering the order of transitive deps. Because we're building our list
  // from the bottom up, this means we will return a list with top level deps LAST.
  // This is important in terms of keeping installations as consistent as possible
  // as folks add new dependencies.
  var toplocs = sortedlocs.filter(function (location) {
    var mod = actions[location][1]
    if (!mod.requiredBy) return true
    // If this module is required by any non-top level module
    // or by any extraneous module, eg user requested or existing
    // then we don't want to give this priority sorting.
    return !mod.requiredBy.some(isNotTopOrExtraneous)
  })
  toplocs.concat(sortedlocs).forEach(function (location) {
    sortByDeps(actions[location])
  })
  // reverse-lexicographic ordering of location strings
  function sortByLocation (aa, bb) {
    return bb.localeCompare(aa)
  }
  function sortModuleByLocation (aa, bb) {
    return sortByLocation(aa && aa.location, bb && bb.location)
  }
  // depth-first walk: emit the actions for a module's dependents first,
  // then prepend this module's action (so it ends up before them)
  function sortByDeps (action) {
    var mod = action[1]
    if (added[mod.location]) return
    added[mod.location] = action
    if (!mod.requiredBy) mod.requiredBy = []
    mod.requiredBy.sort(sortModuleByLocation).forEach(function (mod) {
      if (actions[mod.location]) sortByDeps(actions[mod.location])
    })
    sorted.unshift(action)
  }
  // safety net, anything excluded above gets tacked on the end
  differences.forEach((_) => {
    if (sorted.indexOf(_) === -1) sorted.push(_)
  })
  return sorted
}
// Record one action tuple ([action-name, node]) on the diff list.
function setAction (differences, action, pkg) {
  const entry = [action, pkg]
  differences.push(entry)
}
// Compare the flattened old and new trees and produce the list of
// add / update / move / remove actions needed to turn one into the
// other. Removes that pair up with an equivalent add elsewhere are
// upgraded to a single 'move'.
var diffTrees = module.exports._diffTrees = function (oldTree, newTree) {
  validate('OO', arguments)
  var differences = []
  var flatOldTree = flattenTree(oldTree)
  var flatNewTree = flattenTree(newTree)
  var toRemove = {}
  var toRemoveByName = {}
  // Build our tentative remove list. We don't add remove actions yet
  // because we might reuse them as part of a move.
  Object.keys(flatOldTree).forEach(function (flatname) {
    if (flatname === '/') return
    if (flatNewTree[flatname]) return
    var pkg = flatOldTree[flatname]
    // things living inside a link that points outside the new tree are
    // not ours to remove
    if (pkg.isInLink && /^[.][.][/\\]/.test(path.relative(newTree.realpath, pkg.realpath))) return
    toRemove[flatname] = pkg
    var name = moduleName(pkg)
    if (!toRemoveByName[name]) toRemoveByName[name] = []
    toRemoveByName[name].push({flatname: flatname, pkg: pkg})
  })
  // generate our add/update/move actions
  Object.keys(flatNewTree).forEach(function (flatname) {
    if (flatname === '/') return
    var pkg = flatNewTree[flatname]
    var oldPkg = pkg.oldPkg = flatOldTree[flatname]
    if (oldPkg) {
      // if the versions are equivalent then we don't need to update… unless
      // the user explicitly asked us to.
      if (!pkg.userRequired && pkgAreEquiv(oldPkg, pkg)) return
      setAction(differences, 'update', pkg)
    } else {
      var name = moduleName(pkg)
      // find any packages we're removing that share the same name and are equivalent
      var removing = (toRemoveByName[name] || []).filter((rm) => pkgAreEquiv(rm.pkg, pkg))
      var bundlesOrFromBundle = pkg.fromBundle || pkg.package.bundleDependencies
      // if we have any removes that match AND we're not working with a bundle then upgrade to a move
      if (removing.length && !bundlesOrFromBundle) {
        var toMv = removing.shift()
        toRemoveByName[name] = toRemoveByName[name].filter((rm) => rm !== toMv)
        pkg.fromPath = toMv.pkg.path
        setAction(differences, 'move', pkg)
        delete toRemove[toMv.flatname]
        // we don't generate add actions for things found in links (which already exist on disk)
      } else if (!pkg.isInLink || !(pkg.fromBundle && pkg.fromBundle.isLink)) {
        setAction(differences, 'add', pkg)
      }
    }
  })
  // finally generate our remove actions from any not consumed by moves
  Object
    .keys(toRemove)
    .map((flatname) => toRemove[flatname])
    .forEach((pkg) => setAction(differences, 'remove', pkg))
  return filterActions(differences)
}
// Drop actions for packages excluded by the current install mode, as
// derived from the --production / --only / --also / --optional / --dev
// configuration. Returns the (possibly shorter) differences list.
function filterActions (differences) {
  const includeOpt = npm.config.get('optional')
  const includeDev = npm.config.get('dev') ||
    (!/^prod(uction)?$/.test(npm.config.get('only')) && !npm.config.get('production')) ||
    /^dev(elopment)?$/.test(npm.config.get('only')) ||
    /^dev(elopment)?$/.test(npm.config.get('also'))
  const includeProd = !/^dev(elopment)?$/.test(npm.config.get('only'))
  // fast path: nothing is being excluded, keep every action
  if (includeProd && includeDev && includeOpt) return differences
  log.silly('diff-trees', 'filtering actions:', 'includeDev', includeDev, 'includeProd', includeProd, 'includeOpt', includeOpt)
  return differences.filter((diff) => {
    const pkg = diff[1]
    const pkgIsOnlyDev = isOnlyDev(pkg)
    const pkgIsOnlyOpt = isOnlyOptional(pkg)
    if (!includeProd && pkgIsOnlyDev) return true
    if (includeDev && pkgIsOnlyDev) return true
    if (includeProd && !pkgIsOnlyDev && (includeOpt || !pkgIsOnlyOpt)) return true
    return false
  })
}

27
website/node_modules/npm/lib/install/exists.js generated vendored Normal file
View File

@@ -0,0 +1,27 @@
'use strict'
var fs = require('fs')
var inflight = require('inflight')
var accessError = require('./access-error.js')
var isFsAccessAvailable = require('./is-fs-access-available.js')
// Pick the existence-check implementation at load time: fs.access where
// it is reliable on this platform/node version, otherwise the fs.stat
// fallback (see is-fs-access-available.js for the decision logic).
if (isFsAccessAvailable) {
  module.exports = fsAccessImplementation
} else {
  module.exports = fsStatImplementation
}
// exposed only for testing purposes
module.exports.fsAccessImplementation = fsAccessImplementation
module.exports.fsStatImplementation = fsStatImplementation
// Existence check for `dir` using fs.access; concurrent checks for the
// same directory are collapsed into a single callback via inflight.
function fsAccessImplementation (dir, done) {
  const callback = inflight('exists:' + dir, done)
  if (!callback) return
  fs.access(dir, fs.F_OK, callback)
}
// fs.stat-based fallback existence check. Errors are passed through
// accessError so both implementations report failures the same way.
function fsStatImplementation (dir, done) {
  const callback = inflight('exists:' + dir, done)
  if (!callback) return
  fs.stat(dir, (er) => callback(accessError(dir, er)))
}

42
website/node_modules/npm/lib/install/flatten-tree.js generated vendored Normal file
View File

@@ -0,0 +1,42 @@
'use strict'
var validate = require('aproba')
var moduleName = require('../utils/module-name.js')
module.exports = flattenTree
module.exports.flatName = flatName
module.exports.flatNameFromTree = flatNameFromTree
// Walk the dependency tree breadth-first and return a map from
// flattened path names ('/', '/foo', '/foo/bar', …) to tree nodes.
// A `seen` set guards against revisiting nodes in cycles.
function flattenTree (tree) {
  validate('O', arguments)
  const seen = new Set()
  const flat = {}
  const queue = [[tree, '/']]
  while (queue.length) {
    const [node, nodePath] = queue.shift()
    seen.add(node)
    flat[nodePath] = node
    const prefix = nodePath === '/' ? nodePath : nodePath + '/'
    node.children.forEach((child) => {
      if (seen.has(child)) return
      queue.push([child, flatName(prefix, child)])
    })
  }
  return flat
}
// Join a parent's flattened path prefix with a child's module name
// ('TOP' stands in for a nameless root).
function flatName (path, child) {
  validate('SO', arguments)
  const name = moduleName(child) || 'TOP'
  return path + name
}
// Compute a node's flattened path name by recursing up to the root.
function flatNameFromTree (tree) {
  validate('O', arguments)
  if (tree.isTop) return '/'
  let prefix = flatNameFromTree(tree.parent)
  if (prefix !== '/') prefix += '/'
  return flatName(prefix, tree)
}

12
website/node_modules/npm/lib/install/get-requested.js generated vendored Normal file
View File

@@ -0,0 +1,12 @@
'use strict'
const npa = require('npm-package-arg')
const moduleName = require('../utils/module-name.js')
// Reconstruct the specifier that `reqBy` (defaulting to the first
// dependent) used to request `child`, by looking the child's name up in
// that dependent's dependencies/devDependencies and resolving it with
// npm-package-arg. Returns undefined when nothing requires the child.
module.exports = function (child, reqBy) {
  if (!child.requiredBy.length) return
  if (!reqBy) reqBy = child.requiredBy[0]
  const deps = reqBy.package.dependencies || {}
  const devDeps = reqBy.package.devDependencies || {}
  const name = moduleName(child)
  return npa.resolve(name, deps[name] || devDeps[name], reqBy.realpath)
}

View File

@@ -0,0 +1,20 @@
'use strict'
module.exports = hasModernMeta
const npa = require('npm-package-arg')
const moduleName = require('../utils/module-name.js')
// True when the node itself is a symlink, or any of its ancestors is
// (i.e. the node lives inside a linked tree).
function isLink (child) {
  if (child.isLink) return true
  return Boolean(child.parent && isLink(child.parent))
}
// True when a node carries enough modern (npm@5+) metadata to be
// trusted against a lockfile: it is the top level, a link (or inside
// one), bundled, carries integrity/shasum information, or resolves to a
// git source.
function hasModernMeta (child) {
  if (!child) return false
  const resolved = child.package._resolved && npa.resolve(moduleName(child), child.package._resolved)
  const version = npa.resolve(moduleName(child), child.package.version)
  return child.isTop ||
    isLink(child) ||
    child.fromBundle || child.package._inBundle ||
    child.package._integrity || child.package._shasum ||
    (resolved && resolved.type === 'git') || (version && version.type === 'git')
}

View File

@@ -0,0 +1,18 @@
'use strict'
var childPath = require('../utils/child-path.js')
var reset = require('./node.js').reset
// Adopt a bundler's bundled children into the tree under `parent`:
// each child is reset, marked as coming from `bundler`, re-pathed under
// its new parent, and then recursed into.
module.exports = function inflateBundled (bundler, parent, children) {
  children.forEach(function (child) {
    // already adopted by this bundler (guards against cycles)
    if (child.fromBundle === bundler) return
    reset(child)
    child.fromBundle = bundler
    child.isInLink = bundler.isLink
    child.parent = parent
    child.path = childPath(parent.path, child)
    // inside a link, the on-disk location stays wherever the link points
    child.realpath = bundler.isLink ? child.realpath : childPath(parent.realpath, child)
    child.isLink = child.isLink || parent.isLink || parent.target
    inflateBundled(bundler, child, child.children)
  })
}

View File

@@ -0,0 +1,233 @@
'use strict'
const BB = require('bluebird')
let addBundled
const childPath = require('../utils/child-path.js')
const createChild = require('./node.js').create
let fetchPackageMetadata
const inflateBundled = require('./inflate-bundled.js')
const moduleName = require('../utils/module-name.js')
const normalizePackageData = require('normalize-package-data')
const npm = require('../npm.js')
const realizeShrinkwrapSpecifier = require('./realize-shrinkwrap-specifier.js')
const validate = require('aproba')
const path = require('path')
const isRegistry = require('../utils/is-registry.js')
const hasModernMeta = require('./has-modern-meta.js')
const ssri = require('ssri')
const npa = require('npm-package-arg')
// Entry point: inflate the shrinkwrap/lockfile data `sw` onto `tree`,
// then call `finishInflating` (node-style callback; `opts` is optional).
// Does nothing when shrinkwrap/package-lock handling is disabled.
module.exports = function (tree, sw, opts, finishInflating) {
  // lazy-require to avoid a require cycle at module load time
  if (!fetchPackageMetadata) {
    fetchPackageMetadata = BB.promisify(require('../fetch-package-metadata.js'))
    addBundled = BB.promisify(fetchPackageMetadata.addBundled)
  }
  // support the (tree, sw, callback) call shape
  if (arguments.length === 3) {
    finishInflating = opts
    opts = {}
  }
  if (!npm.config.get('shrinkwrap') || !npm.config.get('package-lock')) {
    return finishInflating()
  }
  tree.loaded = false
  tree.hasRequiresFromLock = sw.requires
  return inflateShrinkwrap(tree.path, tree, sw.dependencies, opts).then(
    () => finishInflating(),
    finishInflating
  )
}
// Recursively realize one level of shrinkwrap dependencies as children
// of `tree`, reusing on-disk children where possible (see
// inflatableChild). Processes entries sequentially via BB.each.
function inflateShrinkwrap (topPath, tree, swdeps, opts) {
  if (!swdeps) return Promise.resolve()
  if (!opts) opts = {}
  // index the current on-disk children by name so entries can reuse them
  const onDisk = {}
  tree.children.forEach((child) => {
    onDisk[moduleName(child)] = child
  })
  tree.children = []
  return BB.each(Object.keys(swdeps), (name) => {
    const sw = swdeps[name]
    const dependencies = sw.dependencies || {}
    const requested = realizeShrinkwrapSpecifier(name, sw, topPath)
    return inflatableChild(
      onDisk[name], name, topPath, tree, sw, requested, opts
    ).then((child) => {
      child.hasRequiresFromLock = tree.hasRequiresFromLock
      // NOTE(review): the recursive call drops `opts` (it defaults back
      // to {} one level down) — confirm whether that is intentional.
      return inflateShrinkwrap(topPath, child, dependencies)
    })
  })
}
// Run normalize-package-data over `pkg`, deliberately swallowing any
// errors: lockfile-derived fake packages may carry metadata that
// normalization rejects, and inflation should proceed regardless.
function normalizePackageDataNoErrors (pkg) {
  try {
    normalizePackageData(pkg)
  } catch (ex) {
    // intentionally ignored — best-effort normalization only
  }
}
// Backslash-escape every character that could be meaningful inside a
// regular expression (anything outside [A-Za-z0-9_/]).
function quotemeta (str) {
  return str.replace(/([^A-Za-z_0-9/])/g, (ch) => '\\' + ch)
}
// If `tb` is a tarball URL served by the configured registry for
// package `name` (or for any package when `name` is absent), return the
// version embedded in the tarball filename; otherwise return undefined.
// NOTE(review): the /https?:/ replace runs AFTER quotemeta has escaped
// ':' in the registry URL — verify it still matches as intended.
function tarballToVersion (name, tb) {
  const registry = quotemeta(npm.config.get('registry'))
    .replace(/https?:/, 'https?:')
    .replace(/([^/])$/, '$1/')
  let matchRegTarball
  if (name) {
    const nameMatch = quotemeta(name)
    matchRegTarball = new RegExp(`^${registry}${nameMatch}/-/${nameMatch}-(.*)[.]tgz$`)
  } else {
    // backreference \1 forces the path segment and filename prefix to agree
    matchRegTarball = new RegExp(`^${registry}(.*)?/-/\\1-(.*)[.]tgz$`)
  }
  const match = tb.match(matchRegTarball)
  if (!match) return
  return match[2] || match[1]
}
// Decide how to realize one shrinkwrap entry as a tree node:
// 1) reuse the matching on-disk child,
// 2) fabricate a "fake" child addressable by integrity/version so the
//    installer can fetch it from the cache by content address, or
// 3) fall back to a full metadata fetch.
function inflatableChild (onDiskChild, name, topPath, tree, sw, requested, opts) {
  validate('OSSOOOO|ZSSOOOO', arguments)
  const usesIntegrity = (
    requested.registry ||
    requested.type === 'remote' ||
    requested.type === 'file'
  )
  // older lockfiles recorded registry tarball URLs in `version`
  const regTarball = tarballToVersion(name, sw.version)
  if (regTarball) {
    sw.resolved = sw.version
    sw.version = regTarball
  }
  if (sw.requires) Object.keys(sw.requires).map(_ => { sw.requires[_] = tarballToVersion(_, sw.requires[_]) || sw.requires[_] })
  const modernLink = requested.type === 'directory' && !sw.from
  if (hasModernMeta(onDiskChild) && childIsEquivalent(sw, requested, onDiskChild)) {
    // The version on disk matches the shrinkwrap entry.
    if (!onDiskChild.fromShrinkwrap) onDiskChild.fromShrinkwrap = requested
    onDiskChild.package._requested = requested
    onDiskChild.package._spec = requested.rawSpec
    onDiskChild.package._where = topPath
    onDiskChild.package._optional = sw.optional
    onDiskChild.package._development = sw.dev
    onDiskChild.package._inBundle = sw.bundled
    onDiskChild.fromBundle = (sw.bundled || onDiskChild.package._inBundle) ? tree.fromBundle || tree : null
    if (!onDiskChild.package._args) onDiskChild.package._args = []
    onDiskChild.package._args.push([String(requested), topPath])
    // non-npm registries can and will return unnormalized data, plus
    // even the npm registry may have package data normalized with older
    // normalization rules. This ensures we get package data in a consistent,
    // stable format.
    normalizePackageDataNoErrors(onDiskChild.package)
    onDiskChild.swRequires = sw.requires
    tree.children.push(onDiskChild)
    return BB.resolve(onDiskChild)
  } else if ((sw.version && (sw.integrity || !usesIntegrity) && (requested.type !== 'directory' || modernLink)) || sw.bundled) {
    // The shrinkwrap entry has an integrity field. We can fake a pkg to get
    // the installer to do a content-address fetch from the cache, if possible.
    return BB.resolve(makeFakeChild(name, topPath, tree, sw, requested))
  } else {
    // It's not on disk, and we can't just look it up by address -- do a full
    // fpm/inflate bundle pass. For registry deps, this will go straight to the
    // tarball URL, as if it were a remote tarball dep.
    return fetchChild(topPath, tree, sw, requested)
  }
}
// True when a shrinkwrap entry's version specifier resolves (via
// npm-package-arg) to a git source.
function isGit (sw) {
  const resolved = npa.resolve(sw.name, sw.version)
  return Boolean(resolved && resolved.type === 'git')
}
// Build a synthetic tree node straight from a shrinkwrap entry, without
// touching the network: enough metadata (_resolved/_integrity/version)
// for the installer to fetch it by content address from the cache.
function makeFakeChild (name, topPath, tree, sw, requested) {
  const from = sw.from || requested.raw
  const pkg = {
    name: name,
    version: sw.version,
    _id: name + '@' + sw.version,
    _resolved: sw.resolved || (isGit(sw) && sw.version),
    _requested: requested,
    _optional: sw.optional,
    _development: sw.dev,
    _inBundle: sw.bundled,
    _integrity: sw.integrity,
    _from: from,
    _spec: requested.rawSpec,
    _where: topPath,
    _args: [[requested.toString(), topPath]],
    dependencies: sw.requires
  }
  if (!sw.bundled) {
    const bundleDependencies = Object.keys(sw.dependencies || {}).filter((d) => sw.dependencies[d].bundled)
    // NOTE(review): assigning bundleDependencies only when the filtered
    // list is EMPTY looks inverted — confirm against npm git history
    // before changing.
    if (bundleDependencies.length === 0) {
      pkg.bundleDependencies = bundleDependencies
    }
  }
  const child = createChild({
    package: pkg,
    loaded: false,
    parent: tree,
    children: [],
    fromShrinkwrap: requested,
    fakeChild: sw,
    fromBundle: sw.bundled ? tree.fromBundle || tree : null,
    path: childPath(tree.path, pkg),
    // directory deps live at their target path, not under node_modules
    realpath: requested.type === 'directory' ? requested.fetchSpec : childPath(tree.realpath, pkg),
    location: (tree.location === '/' ? '' : tree.location + '/') + pkg.name,
    isLink: requested.type === 'directory',
    isInLink: tree.isLink,
    swRequires: sw.requires
  })
  tree.children.push(child)
  return child
}
// Fully fetch a shrinkwrap entry's metadata (and any bundled deps),
// then attach the resulting node to `tree`. Used when the entry can
// neither reuse the on-disk child nor be faked from lock data.
function fetchChild (topPath, tree, sw, requested) {
  return fetchPackageMetadata(requested, topPath).then((pkg) => {
    pkg._from = sw.from || requested.raw
    pkg._optional = sw.optional
    pkg._development = sw.dev
    pkg._inBundle = false
    return addBundled(pkg).then(() => pkg)
  }).then((pkg) => {
    var isLink = pkg._requested.type === 'directory'
    const child = createChild({
      package: pkg,
      loaded: false,
      parent: tree,
      fromShrinkwrap: requested,
      path: childPath(tree.path, pkg),
      realpath: isLink ? requested.fetchSpec : childPath(tree.realpath, pkg),
      children: pkg._bundled || [],
      location: (tree.location === '/' ? '' : tree.location + '/') + pkg.name,
      fromBundle: null,
      isLink: isLink,
      isInLink: tree.isLink,
      swRequires: sw.requires
    })
    tree.children.push(child)
    // re-parent any bundled children under the new node
    if (pkg._bundled) {
      delete pkg._bundled
      inflateBundled(child, child, child.children)
    }
    return child
  })
}
// Decide whether the on-disk child satisfies the shrinkwrap entry `sw`
// (as requested by `requested`), from strongest evidence to weakest:
// prior shrinkwrap provenance, integrity match, link target, resolved
// URL, legacy `from`, and finally plain version equality.
function childIsEquivalent (sw, requested, child) {
  if (!child) return false
  if (child.fromShrinkwrap) return true
  if (
    sw.integrity &&
    child.package._integrity &&
    ssri.parse(sw.integrity).match(child.package._integrity)
  ) return true
  if (child.isLink && requested.type === 'directory') return path.relative(child.realpath, requested.fetchSpec) === ''
  if (sw.resolved) return child.package._resolved === sw.resolved
  if (!isRegistry(requested) && sw.from) return child.package._from === sw.from
  if (!isRegistry(requested) && child.package._resolved) return sw.version === child.package._resolved
  return child.package.version === sw.version
}

8
website/node_modules/npm/lib/install/is-dev-dep.js generated vendored Normal file
View File

@@ -0,0 +1,8 @@
'use strict'
module.exports = isDevDep
// Return the devDependency specifier that `node`'s package declares for
// `name`, or a falsy value when there is no such dev dependency.
function isDevDep (node, name) {
  const pkg = node.package
  if (!pkg) return pkg
  const devDeps = pkg.devDependencies
  if (!devDeps) return devDeps
  return devDeps[name]
}

27
website/node_modules/npm/lib/install/is-extraneous.js generated vendored Normal file
View File

@@ -0,0 +1,27 @@
'use strict'
module.exports = isExtraneous
// A module is extraneous when nothing in the tree legitimately requires
// it — the logical negation of isNotExtraneous below.
function isExtraneous (tree) {
  return !isNotExtraneous(tree)
}
// Walk up to the root of the tree and return its load error (truthy
// when the top level's package.json could not be read).
function topHasNoPjson (tree) {
  let node = tree
  while (!node.isTop) {
    node = node.parent
  }
  return node.error
}
// Positive check: a module is NOT extraneous when it is the top level,
// was requested directly by the user, or is (transitively) required by
// such a module. `isCycle` tracks visited paths; re-entering a visited
// node means we're in a cycle, which only legitimizes its members when
// the top level has no package.json to contradict it.
function isNotExtraneous (tree, isCycle) {
  if (!isCycle) isCycle = {}
  if (tree.isTop || tree.userRequired) return true
  if (isCycle[tree.path]) return topHasNoPjson(tree)
  isCycle[tree.path] = true
  return tree.requiredBy && tree.requiredBy.some(function (node) {
    // prototype chain lets each branch extend the set without sharing
    return isNotExtraneous(node, Object.create(isCycle))
  })
}

View File

@@ -0,0 +1,22 @@
'use strict'
var fs = require('fs')
var semver = require('semver')
var isWindows = process.platform === 'win32'
// Module-level decision: export a boolean saying whether fs.access is a
// trustworthy existence check on this platform/runtime.
// fs.access first introduced in node 0.12 / io.js
if (!fs.access) {
  module.exports = false
} else if (!isWindows) {
  // fs.access always works on non-Windows OSes
  module.exports = true
} else {
  // The Windows implementation of `fs.access` has a bug where it will
  // sometimes return access errors all the time for directories, even
  // when access is available. As all we actually test ARE directories, this
  // is a bit of a problem.
  // This was fixed in io.js version 1.5.0
  // As of 2015-07-20, it is still unfixed in node:
  // https://github.com/joyent/node/issues/25657
  module.exports = semver.gte(process.version, '1.5.0')
}

35
website/node_modules/npm/lib/install/is-only-dev.js generated vendored Normal file
View File

@@ -0,0 +1,35 @@
'use strict'
module.exports = isOnlyDev
const moduleName = require('../utils/module-name.js')
const isDevDep = require('./is-dev-dep.js')
const isProdDep = require('./is-prod-dep.js')
// Returns true if the module `node` is only required directly as a dev
// dependency of the top level or transitively _from_ top level dev
// dependencies.
// Dual mode modules (that are both dev AND prod) should return false.
// True when every path by which `node` is required leads back to a dev
// dependency of the top level; `seen` guards against cycles.
function isOnlyDev (node, seen) {
  if (!seen) seen = new Set()
  const dependents = node.requiredBy
  return dependents.length && dependents.every(andIsOnlyDev(moduleName(node), seen))
}
// There is a known limitation with this implementation: If a dependency is
// ONLY required by cycles that are detached from the top level then it will
// ultimately return true.
//
// This is ok though: We don't allow shrinkwraps with extraneous deps and
// this situation is caught by the extraneous checker before we get here.
// Build a predicate for one dependent `req`: at the top level, `name`
// must be a pure dev dependency (dev and not prod); deeper in the tree
// we recurse into the dependent's own dependents, using `seen` to stop
// on cycles.
function andIsOnlyDev (name, seen) {
  return function (req) {
    const dev = isDevDep(req, name)
    const prod = isProdDep(req, name)
    if (req.isTop) {
      return dev && !prod
    }
    if (seen.has(req)) return true
    seen.add(req)
    return isOnlyDev(req, seen)
  }
}

View File

@@ -0,0 +1,19 @@
'use strict'
module.exports = isOptional
const isOptDep = require('./is-opt-dep.js')
// True when every dependent chain leading to `node` passes through an
// optionalDependency (or a shrinkwrap entry marked optional). Reaching
// a node nothing requires means we've hit the top level, and revisiting
// a node means a cycle — both terminate the walk with false.
function isOptional (node, seen) {
  if (!seen) seen = new Set()
  if (seen.has(node) || node.requiredBy.length === 0) {
    return false
  }
  seen.add(node)
  const optionalPerShrinkwrap = node.fromShrinkwrap && node.package._optional
  return node.requiredBy.every((req) => {
    if (req.fakeChild && optionalPerShrinkwrap) return true
    return isOptDep(req, node.package.name) || isOptional(req, seen)
  })
}

8
website/node_modules/npm/lib/install/is-opt-dep.js generated vendored Normal file
View File

@@ -0,0 +1,8 @@
'use strict'
module.exports = isOptDep
// Return the optionalDependency specifier `node`'s package declares for
// `name`, or a falsy value when there is no such optional dependency.
function isOptDep (node, name) {
  const pkg = node.package
  if (!pkg) return pkg
  const optDeps = pkg.optionalDependencies
  if (!optDeps) return optDeps
  return optDeps[name]
}

9
website/node_modules/npm/lib/install/is-prod-dep.js generated vendored Normal file
View File

@@ -0,0 +1,9 @@
'use strict'
module.exports = isProdDep
// Return the (production) dependency specifier `node`'s package
// declares for `name`, or a falsy value when there is none.
function isProdDep (node, name) {
  const pkg = node.package
  if (!pkg) return pkg
  const deps = pkg.dependencies
  if (!deps) return deps
  return deps[name]
}

View File

@@ -0,0 +1,8 @@
'use strict'
var uniqueFilename = require('unique-filename')
var moduleName = require('../utils/module-name.js')
module.exports = moduleStagingPath
// Compute the temporary staging path used while installing `pkg`: a
// filename under `staging` prefixed with the module's name and made
// unique per install target by keying on the module's realpath.
function moduleStagingPath (staging, pkg) {
  return uniqueFilename(staging, moduleName(pkg), pkg.realpath)
}

View File

@@ -0,0 +1,140 @@
'use strict'
var union = require('lodash.union')
var without = require('lodash.without')
var validate = require('aproba')
var flattenTree = require('./flatten-tree.js')
var isExtraneous = require('./is-extraneous.js')
var validateAllPeerDeps = require('./deps.js').validateAllPeerDeps
var packageId = require('../utils/package-id.js')
var moduleName = require('../utils/module-name.js')
var npm = require('../npm.js')
// Return true if tree is a part of a cycle that:
// A) Never connects to the top of the tree
// B) Has not had a point in the cycle arbitrarily declared its top
// yet.
// True when `tree` belongs to a dependency cycle that never reaches the
// top of the tree and has not yet had one member designated as the
// cycle's arbitrary top. `seen` tracks paths visited on this walk.
function isDisconnectedCycle (tree, seen) {
  if (!seen) seen = {}
  // the root, a designated cycle top, or an unrequired node all mean
  // this path connects (or will connect) to the top of the tree
  if (tree.isTop || tree.cycleTop || tree.requiredBy.length === 0) {
    return false
  }
  if (seen[tree.path]) return true
  seen[tree.path] = true
  return tree.requiredBy.every(function (node) {
    // prototype chain lets each branch extend the set without sharing
    return isDisconnectedCycle(node, Object.create(seen))
  })
}
// Rewire the physical (on-disk) tree into a logical one: each node is
// re-attached as a child of every module that requires it, and missing
// peer dependencies are recorded on their dependents. Mutates and
// returns `tree`.
var mutateIntoLogicalTree = module.exports = function (tree) {
  validate('O', arguments)
  // record unmet peer deps on the node that declared them
  validateAllPeerDeps(tree, function (tree, pkgname, version) {
    if (!tree.missingPeers) tree.missingPeers = {}
    tree.missingPeers[pkgname] = version
  })
  var flat = flattenTree(tree)
  Object.keys(flat).sort().forEach(function (flatname) {
    var node = flat[flatname]
    if (!(node.requiredBy && node.requiredBy.length)) return
    if (node.parent) {
      // If a node is a cycle that never reaches the root of the logical
      // tree then we'll leave it attached to the root, or else it
      // would go missing. Further we'll note that this is the node in the
      // cycle that we picked arbitrarily to be the one attached to the root.
      // others will fall
      if (isDisconnectedCycle(node)) {
        node.cycleTop = true
        // Nor do we want to disconnect non-cyclical extraneous modules from the tree.
      } else if (node.requiredBy.length) {
        // regular deps though, we do, as we're moving them into the capable
        // hands of the modules that require them.
        node.parent.children = without(node.parent.children, node)
      }
    }
    // attach the node under every module that requires it
    node.requiredBy.forEach(function (parentNode) {
      parentNode.children = union(parentNode.children, [node])
    })
  })
  return tree
}
// Convert an on-disk tree into the legacy read-installed-style shape:
// first rewire it into a logical tree, then translate it into nested
// package data.
module.exports.asReadInstalled = function (tree) {
  mutateIntoLogicalTree(tree)
  return translateTree(tree)
}
// Entry point for the tree translation; the Set guards against
// revisiting nodes when the logical tree contains cycles.
function translateTree (tree) {
  return translateTree_(tree, new Set())
}
// Recursively turn a logical tree node into nested package data:
// children become entries in `pkg.dependencies`, and missing, invalid
// and missing-peer dependencies are annotated in place. Returns the
// (mutated) package object for this node.
function translateTree_ (tree, seen) {
  var pkg = tree.package
  // cycle guard: a node already visited (or already translated, as
  // evidenced by _dependencies) is returned as-is
  if (seen.has(tree)) return pkg
  seen.add(tree)
  if (pkg._dependencies) return pkg
  // stash the declared deps; `dependencies` is rebuilt from actual children
  pkg._dependencies = pkg.dependencies
  pkg.dependencies = {}
  tree.children.forEach(function (child) {
    const dep = pkg.dependencies[moduleName(child)] = translateTree_(child, seen)
    if (child.fakeChild) {
      dep.missing = true
      dep.optional = child.package._optional
      dep.requiredBy = child.package._spec
    }
  })
  // mark a declared-but-unresolved dependency as missing (or, if some
  // version IS present, as invalid)
  function markMissing (name, requiredBy) {
    if (pkg.dependencies[name]) {
      if (pkg.dependencies[name].missing) return
      pkg.dependencies[name].invalid = true
      pkg.dependencies[name].realName = name
      pkg.dependencies[name].extraneous = false
    } else {
      pkg.dependencies[name] = {
        requiredBy: requiredBy,
        missing: true,
        optional: !!pkg.optionalDependencies[name]
      }
    }
  }
  Object.keys(tree.missingDeps).forEach(function (name) {
    markMissing(name, tree.missingDeps[name])
  })
  Object.keys(tree.missingDevDeps).forEach(function (name) {
    markMissing(name, tree.missingDevDeps[name])
  })
  // surface missing peer deps on this node (and, at the root, on the
  // root itself) as synthetic dependency entries with peerMissing info
  var checkForMissingPeers = (tree.parent ? [] : [tree]).concat(tree.children)
  checkForMissingPeers.filter(function (child) {
    return child.missingPeers
  }).forEach(function (child) {
    Object.keys(child.missingPeers).forEach(function (pkgname) {
      var version = child.missingPeers[pkgname]
      var peerPkg = pkg.dependencies[pkgname]
      if (!peerPkg) {
        peerPkg = pkg.dependencies[pkgname] = {
          _id: pkgname + '@' + version,
          name: pkgname,
          version: version
        }
      }
      if (!peerPkg.peerMissing) peerPkg.peerMissing = []
      peerPkg.peerMissing.push({
        requiredBy: packageId(child),
        requires: pkgname + '@' + version
      })
    })
  })
  pkg.path = tree.path
  pkg.error = tree.error
  pkg.extraneous = !tree.isTop && (!tree.parent.isTop || !tree.parent.error) && !npm.config.get('global') && isExtraneous(tree)
  if (tree.target && tree.parent && !tree.parent.target) pkg.link = tree.realpath
  return pkg
}

77
website/node_modules/npm/lib/install/node.js generated vendored Normal file
View File

@@ -0,0 +1,77 @@
'use strict'
// Default shape of a tree node; `create` fills in any of these fields
// that are missing on a node, recursing into nested objects.
var defaultTemplate = {
  package: {
    version: '',
    dependencies: {},
    devDependencies: {},
    optionalDependencies: {}
  },
  loaded: false,
  children: [],          // physical children under this node
  requiredBy: [],        // nodes that depend on this one
  requires: [],          // nodes this one depends on
  missingDeps: {},
  missingDevDeps: {},
  phantomChildren: {},
  path: null,            // logical install path
  realpath: null,        // on-disk path (differs under symlinks)
  location: null,
  userRequired: false,   // explicitly requested on the command line
  save: false,
  saveSpec: null,
  isTop: false,
  fromBundle: false
}
// Safe read of a possibly-absent node's isLink flag (falsy input is
// passed straight through).
function isLink (node) {
  if (!node) return node
  return node.isLink
}
// True when the node sits inside a symlinked tree — either flagged as
// such, or itself a link (falsy input is passed straight through).
function isInLink (node) {
  if (!node) return node
  return node.isInLink || node.isLink
}
// Fill in any fields missing on `node` from `template` (defaulting to
// defaultTemplate), recursing into plain-object fields. On the
// top-level call only, also derive link flags from the parent node.
// Mutates and returns `node`.
var create = exports.create = function (node, template, isNotTop) {
  if (!template) template = defaultTemplate
  Object.keys(template).forEach(function (key) {
    // plain objects are merged recursively; arrays are treated as leaf values
    if (template[key] != null && typeof template[key] === 'object' && !(template[key] instanceof Array)) {
      if (!node[key]) node[key] = {}
      return create(node[key], template[key], true)
    }
    // only fill fields the node doesn't already have
    if (node[key] != null) return
    node[key] = template[key]
  })
  if (!isNotTop) {
    // isLink is true for the symlink and everything inside it.
    // by contrast, isInLink is true for only the things inside a link
    if (node.isLink == null) node.isLink = isLink(node.parent)
    if (node.isInLink == null) node.isInLink = isInLink(node.parent)
    if (node.fromBundle == null) {
      node.fromBundle = false
    }
  }
  return node
}
// Reset a node (and, recursively, its children) back to an
// un-resolved state so dependency resolution can run afresh; the `seen`
// set guards against cycles.
exports.reset = function (node) {
  reset(node, new Set())
}
function reset (node, seen) {
  if (seen.has(node)) return
  seen.add(node)
  // ensure all template fields exist before clearing them
  var child = create(node)
  // FIXME: cleaning up after read-package-json's mess =(
  if (child.package._id === '@') delete child.package._id
  child.isTop = false
  child.requiredBy = []
  child.requires = []
  child.missingDeps = {}
  child.missingDevDeps = {}
  child.phantomChildren = {}
  child.location = null
  child.children.forEach(function (child) { reset(child, seen) })
}

108
website/node_modules/npm/lib/install/read-shrinkwrap.js generated vendored Normal file
View File

@@ -0,0 +1,108 @@
'use strict'
const BB = require('bluebird')
const fs = require('graceful-fs')
const iferr = require('iferr')
const inflateShrinkwrap = require('./inflate-shrinkwrap.js')
const log = require('npmlog')
const parseJSON = require('../utils/parse-json.js')
const path = require('path')
const PKGLOCK_VERSION = require('../npm.js').lockfileVersion
const readFileAsync = BB.promisify(fs.readFile)
module.exports = readShrinkwrap
// Populate child.package._shrinkwrap with the parsed lockfile that governs
// this package. npm-shrinkwrap.json always wins; package-lock.json is only
// consulted for the root (isTop) package. `next` is a node-style callback.
function readShrinkwrap (child, next) {
  // Already loaded (or pre-set) — still complete asynchronously so the
  // callback contract stays consistent.
  if (child.package._shrinkwrap) return process.nextTick(next)
  BB.join(
    maybeReadFile('npm-shrinkwrap.json', child),
    // Don't read non-root lockfiles
    child.isTop && maybeReadFile('package-lock.json', child),
    child.isTop && maybeReadFile('package.json', child),
    // For non-root packages the two guarded reads above short-circuit to
    // `false`, so `lockfile` and `pkgJson` are simply falsy here.
    (shrinkwrap, lockfile, pkgJson) => {
      if (shrinkwrap && lockfile) {
        log.warn('read-shrinkwrap', 'Ignoring package-lock.json because there is already an npm-shrinkwrap.json. Please use only one of the two.')
      }
      const name = shrinkwrap ? 'npm-shrinkwrap.json' : 'package-lock.json'
      const parsed = parsePkgLock(shrinkwrap || lockfile, name)
      if (parsed && parsed.lockfileVersion !== PKGLOCK_VERSION) {
        log.warn('read-shrinkwrap', `This version of npm is compatible with lockfileVersion@${PKGLOCK_VERSION}, but ${name} was generated for lockfileVersion@${parsed.lockfileVersion || 0}. I'll try to do my best with it!`)
      }
      // NOTE(review): pkgJson is read but never used in this function —
      // presumably reserved for a future consistency check; confirm before
      // removing the read above.
      child.package._shrinkwrap = parsed
    }
  ).then(() => next(), next)
}
// Read file `name` from the package's directory, resolving to its UTF-8
// contents, or to null when the file simply does not exist (ENOENT).
// Any other read error is rethrown and rejects the returned promise.
function maybeReadFile (name, child) {
  var file = path.join(child.path, name)
  return readFileAsync(file, 'utf8').catch(function (err) {
    if (err.code !== 'ENOENT') throw err
    return null
  })
}
// Like readShrinkwrap, but when a lockfile was found it is additionally
// inflated into the dependency tree before calling `next`.
module.exports.andInflate = function (child, next) {
  readShrinkwrap(child, iferr(next, function () {
    var sw = child.package._shrinkwrap
    if (!sw) return next()
    return inflateShrinkwrap(child, sw, next)
  }))
}
// Git conflict markers: seven-or-more of the marker character ("{7,}"
// tolerates a larger configured conflict-marker-size).
const PARENT_RE = /\|{7,}/g // '|||||||' common-ancestor section (diff3 style)
const OURS_RE = /<{7,}/g // '<<<<<<<' start of "our" side
const THEIRS_RE = /={7,}/g // '=======' separator between the two sides
const END_RE = />{7,}/g // '>>>>>>>' end of "their" side
// Exposed for unit tests only.
module.exports._isDiff = isDiff
// A string is treated as a git conflict only when the "ours" start marker,
// the separator, and the end marker are all present. Returns the last match
// result (truthy array) or a falsy value, exactly as callers expect.
function isDiff (str) {
  var hasOurs = str.match(OURS_RE)
  var hasTheirs = str.match(THEIRS_RE)
  var hasEnd = str.match(END_RE)
  return hasOurs && hasTheirs && hasEnd
}
// Exposed for unit tests only.
module.exports._parsePkgLock = parsePkgLock
// Parse lockfile text `str` (read from `filename`, which is used only in
// user-facing messages) into an object. Returns null for empty input.
// If plain JSON parsing fails and the text looks like a git conflict,
// attempt to auto-resolve by parsing the "ours" and "theirs" sides
// separately and reconciling them; if that also fails — or the text is not
// a conflict — the ORIGINAL parse error is thrown.
function parsePkgLock (str, filename) {
  if (!str) { return null }
  try {
    return parseJSON(str)
  } catch (e) {
    if (isDiff(str)) {
      // Fixed: these messages previously contained the literal garbled text
      // "$(unknown)" instead of interpolating the `filename` parameter
      // (which was otherwise unused).
      log.warn('conflict', `A git conflict was detected in ${filename}. Attempting to auto-resolve.`)
      log.warn('conflict', 'To make this happen automatically on git rebase/merge, consider using the npm-merge-driver:')
      log.warn('conflict', '$ npx npm-merge-driver install -g')
      // Split into the three conflict sections. Lines outside any marker
      // ('top' state) are common text and are appended to all three buffers.
      const pieces = str.split(/[\n\r]+/g).reduce((acc, line) => {
        if (line.match(PARENT_RE)) acc.state = 'parent'
        else if (line.match(OURS_RE)) acc.state = 'ours'
        else if (line.match(THEIRS_RE)) acc.state = 'theirs'
        else if (line.match(END_RE)) acc.state = 'top'
        else {
          if (acc.state === 'top' || acc.state === 'ours') acc.ours += line
          if (acc.state === 'top' || acc.state === 'theirs') acc.theirs += line
          if (acc.state === 'top' || acc.state === 'parent') acc.parent += line
        }
        return acc
      }, {
        state: 'top',
        ours: '',
        theirs: '',
        parent: ''
      })
      try {
        const ours = parseJSON(pieces.ours)
        const theirs = parseJSON(pieces.theirs)
        return reconcileLockfiles(ours, theirs)
      } catch (_e) {
        log.error('conflict', `Automatic conflict resolution failed. Please manually resolve conflicts in ${filename} and try again.`)
        log.silly('conflict', `Error during resolution: ${_e}`)
        // Re-throw the original parse error; the resolution error was only
        // logged (at silly level) above.
        throw e
      }
    } else {
      throw e
    }
  }
}
// Merge the two sides of a conflicted lockfile into one object; keys from
// `theirs` win on conflict, keys unique to `ours` are preserved.
// Fixed: the original signature declared (parent, ours, theirs) while the
// only call site (parsePkgLock) passes two arguments, which shifted `ours`
// into the unused `parent` slot and made the "merge" a plain copy of
// `theirs`. The signature now matches the call.
function reconcileLockfiles (ours, theirs) {
  return Object.assign({}, ours, theirs)
}

View File

@@ -0,0 +1,22 @@
'use strict'
var npa = require('npm-package-arg')
const isRegistry = require('../utils/is-registry.js')
module.exports = function (name, sw, where) {
try {
if (sw.version && sw.integrity) {
return npa.resolve(name, sw.version, where)
} else if (sw.from) {
const spec = npa(sw.from, where)
if (isRegistry(spec) && sw.version) {
return npa.resolve(name, sw.version, where)
} else if (!sw.resolved) {
return spec
}
}
if (sw.resolved) {
return npa.resolve(name, sw.resolved, where)
}
} catch (_) { }
return npa.resolve(name, sw.version, where)
}

View File

@@ -0,0 +1,31 @@
'use strict'
var path = require('path')
var moduleName = require('../utils/module-name.js')
module.exports = reportOptionalFailure
// Walk parent links upward and return the root of the tree.
function top (tree) {
  var node = tree
  while (node.parent) node = node.parent
  return node
}
// Record a non-fatal optional-dependency failure as a warning on the root
// of the tree. `what` (when given) names the dep that failed under `tree`;
// otherwise `tree` itself is the failing package. The error object is
// annotated with `optional` (an id like "name@version") and `location`
// (path relative to the root) before being pushed onto root.warnings.
function reportOptionalFailure (tree, what, error) {
  var rootTree = top(tree)
  if (!rootTree.warnings) rootTree.warnings = []
  var id
  if (what) {
    // Look the version spec up in whichever dependency section declares it.
    var pkg = tree.package
    var version = (pkg.dependencies && pkg.dependencies[what]) ||
      (pkg.optionalDependencies && pkg.optionalDependencies[what]) ||
      (pkg.devDependencies && pkg.devDependencies[what])
    id = version ? what + '@' + version : what
  } else {
    id = tree._id || moduleName(tree) + (tree.package.version ? '@' + tree.package.version : '')
  }
  var location = path.relative(rootTree.path, tree.path)
  if (what) location = path.join(location, 'node_modules', what)
  error.optional = id
  error.location = location
  rootTree.warnings.push(error)
}

189
website/node_modules/npm/lib/install/save.js generated vendored Normal file
View File

@@ -0,0 +1,189 @@
'use strict'
const deepSortObject = require('../utils/deep-sort-object.js')
const detectIndent = require('detect-indent')
const detectNewline = require('detect-newline')
const fs = require('graceful-fs')
const iferr = require('iferr')
const log = require('npmlog')
const moduleName = require('../utils/module-name.js')
const npm = require('../npm.js')
const parseJSON = require('../utils/parse-json.js')
const path = require('path')
const stringifyPackage = require('stringify-package')
const validate = require('aproba')
const without = require('lodash.without')
const writeFileAtomic = require('write-file-atomic')
// if the -S|--save option is specified, then write installed packages
// as dependencies to a package.json file.
// Entry point for saving: write the install's changes to package.json, then
// (with any save error downgraded to a warning) to the shrinkwrap/lockfile,
// and finally hand control back to `andReturn`.
exports.saveRequested = function (tree, andReturn) {
  validate('OF', arguments)
  savePackageJson(tree, andWarnErrors(andSaveShrinkwrap(tree, andReturn)))
}
// Returns a continuation that writes the shrinkwrap/lockfile for `tree` and
// then calls `andReturn`. The incoming `er` is validated but intentionally
// not propagated: in the only usage (saveRequested) this continuation is
// wrapped by andWarnErrors, which has already demoted the error to a
// warning and replaced it with null.
function andSaveShrinkwrap (tree, andReturn) {
  validate('OF', arguments)
  return function (er) {
    validate('E', arguments)
    saveShrinkwrap(tree, andWarnErrors(andReturn))
  }
}
// Wrap callback `cb` so an error in the first argument is demoted to a
// "saveError" warning: it is logged, replaced with null, and every other
// argument is forwarded unchanged.
function andWarnErrors (cb) {
  validate('F', arguments)
  return function (er) {
    if (er) log.warn('saveError', er.message)
    // Mutate `arguments` in place so `cb` sees null where the error was,
    // while the argument count — including zero-argument calls, which
    // saveShrinkwrap can make via next() — is preserved exactly.
    arguments[0] = null
    cb.apply(null, arguments)
  }
}
exports.saveShrinkwrap = saveShrinkwrap
// Write npm-shrinkwrap.json / package-lock.json for `tree`, unless lockfile
// writing is disabled via --no-shrinkwrap / --no-package-lock, in which
// case `next` is called immediately with no arguments.
function saveShrinkwrap (tree, next) {
  validate('OF', arguments)
  if (!npm.config.get('shrinkwrap') || !npm.config.get('package-lock')) {
    return next()
  }
  require('../shrinkwrap.js').createShrinkwrap(tree, {silent: false}, next)
}
// Rewrite the tree's package.json to reflect installed/removed packages:
// preserves the file's existing indentation and newline style, moves a
// dependency between sections when its save-type changed, maintains the
// bundleDependencies list when --save-bundle is set, sorts each touched
// section, and skips the write entirely when nothing changed.
function savePackageJson (tree, next) {
  validate('OF', arguments)
  var saveBundle = npm.config.get('save-bundle')
  // each item in the tree is a top-level thing that should be saved
  // to the package.json file.
  // The relevant tree shape is { <folder>: {what:<pkg>} }
  var saveTarget = path.resolve(tree.path, 'package.json')
  // don't use readJson, because we don't want to do all the other
  // tricky npm-specific stuff that's in there.
  fs.readFile(saveTarget, 'utf8', iferr(next, function (packagejson) {
    // Capture original formatting so the rewrite is minimally invasive.
    const indent = detectIndent(packagejson).indent
    const newline = detectNewline(packagejson)
    try {
      tree.package = parseJSON(packagejson)
    } catch (ex) {
      return next(ex)
    }
    // If we're saving bundled deps, normalize the key before we start
    if (saveBundle) {
      var bundle = tree.package.bundleDependencies || tree.package.bundledDependencies
      delete tree.package.bundledDependencies
      if (!Array.isArray(bundle)) bundle = []
    }
    var toSave = getThingsToSave(tree)
    var toRemove = getThingsToRemove(tree)
    // savingTo collects the names of the dependency sections we will touch.
    var savingTo = {}
    toSave.forEach(function (pkg) { if (pkg.save) savingTo[pkg.save] = true })
    toRemove.forEach(function (pkg) { if (pkg.save) savingTo[pkg.save] = true })
    // Make sure every section we are about to write into exists.
    Object.keys(savingTo).forEach(function (save) {
      if (!tree.package[save]) tree.package[save] = {}
    })
    log.verbose('saving', toSave)
    const types = ['dependencies', 'devDependencies', 'optionalDependencies']
    toSave.forEach(function (pkg) {
      if (pkg.save) tree.package[pkg.save][pkg.name] = pkg.spec
      // A package belongs in only one section: remove it from the others
      // and tell the user it moved.
      const movedFrom = []
      for (let saveType of types) {
        if (
          pkg.save !== saveType &&
          tree.package[saveType] &&
          tree.package[saveType][pkg.name]
        ) {
          movedFrom.push(saveType)
          delete tree.package[saveType][pkg.name]
        }
      }
      if (movedFrom.length) {
        log.notice('save', `${pkg.name} is being moved from ${movedFrom.join(' and ')} to ${pkg.save}`)
      }
      if (saveBundle) {
        // Add to the bundle list only if not already present.
        var ii = bundle.indexOf(pkg.name)
        if (ii === -1) bundle.push(pkg.name)
      }
    })
    toRemove.forEach(function (pkg) {
      if (pkg.save) delete tree.package[pkg.save][pkg.name]
      if (saveBundle) {
        bundle = without(bundle, pkg.name)
      }
    })
    // Keep every touched section deterministically sorted.
    Object.keys(savingTo).forEach(function (key) {
      tree.package[key] = deepSortObject(tree.package[key])
    })
    if (saveBundle) {
      tree.package.bundleDependencies = deepSortObject(bundle)
    }
    var json = stringifyPackage(tree.package, indent, newline)
    if (json === packagejson) {
      // Byte-identical output — avoid touching the file at all.
      log.verbose('shrinkwrap', 'skipping write for package.json because there were no changes.')
      next()
    } else {
      writeFileAtomic(saveTarget, json, next)
    }
  }))
}
exports.getSaveType = function (tree, arg) {
if (arguments.length) validate('OO', arguments)
var globalInstall = npm.config.get('global')
var noSaveFlags = !npm.config.get('save') &&
!npm.config.get('save-dev') &&
!npm.config.get('save-prod') &&
!npm.config.get('save-optional')
if (globalInstall || noSaveFlags) return null
if (npm.config.get('save-optional')) {
return 'optionalDependencies'
} else if (npm.config.get('save-dev')) {
return 'devDependencies'
} else if (npm.config.get('save-prod')) {
return 'dependencies'
} else {
if (arg) {
var name = moduleName(arg)
if (tree.package.optionalDependencies[name]) {
return 'optionalDependencies'
} else if (tree.package.devDependencies[name]) {
return 'devDependencies'
}
}
return 'dependencies'
}
}
// Collect the children of `tree` that are flagged for saving, as
// {name, spec, save} records describing what to write and into which
// package.json section.
function getThingsToSave (tree) {
  validate('O', arguments)
  var saveable = []
  tree.children.forEach(function (child) {
    if (!child.save) return
    saveable.push({
      name: moduleName(child),
      spec: child.saveSpec,
      save: child.save
    })
  })
  return saveable
}
// Collect previously-installed children that this install removed, as
// {name, save} records; returns an empty list when nothing was removed.
function getThingsToRemove (tree) {
  validate('O', arguments)
  var removed = tree.removedChildren
  if (!removed) return []
  return removed.map(function (child) {
    return {
      name: moduleName(child),
      save: child.save
    }
  })
}

Some files were not shown because too many files have changed in this diff Show More